You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.wayang.api.serialization.customserializers

import com.fasterxml.jackson.core.{JsonParser, JsonProcessingException}
import com.fasterxml.jackson.core.`type`.TypeReference
import com.fasterxml.jackson.databind.{DeserializationContext, JsonDeserializer, JsonNode}
import com.fasterxml.jackson.databind.jsontype.TypeDeserializer
import org.apache.wayang.api.MultiContext
import org.apache.wayang.api.serialization.SerializationUtils.mapper
import org.apache.wayang.core.api.Configuration
import org.apache.wayang.java.Java
import org.apache.wayang.postgres.Postgres
import org.apache.wayang.spark.Spark
import org.apache.wayang.sqlite3.Sqlite3

import java.io.IOException

/**
 * Custom Jackson deserializer that reconstructs a [[MultiContext]] from its JSON form.
 *
 * The JSON tree is expected to contain three fields (written by the matching serializer):
 *   - `configuration`: a serialized [[Configuration]]
 *   - `sink`: an optional [[MultiContext.UnarySink]] (text-file or object-file sink)
 *   - `plugins`: a list of plugin class names to re-register on the rebuilt context
 */
class MultiContextDeserializer extends JsonDeserializer[MultiContext] {

  /**
   * Polymorphic deserialization entry point; type information is not needed here,
   * so this simply delegates to [[deserialize]].
   */
  override def deserializeWithType(p: JsonParser, ctxt: DeserializationContext, typeDeserializer: TypeDeserializer): AnyRef = {
    this.deserialize(p, ctxt)
  }

  /**
   * Rebuilds a [[MultiContext]] by deserializing each of its fields separately
   * and re-applying them (configuration, then sink, then plugins).
   *
   * @param jp   the JSON parser positioned at the serialized MultiContext
   * @param ctxt the Jackson deserialization context
   * @return the reconstructed MultiContext
   */
  @throws[IOException]
  @throws[JsonProcessingException]
  override def deserialize(jp: JsonParser, ctxt: DeserializationContext): MultiContext = {
    // Deserialize each field of MultiContext separately from the JSON tree.
    val node: JsonNode = jp.getCodec.readTree(jp)

    val configurationParser: JsonParser = node.get("configuration").traverse(jp.getCodec)
    val configuration: Configuration = mapper.readValue(configurationParser, classOf[Configuration])

    val sinkParser: JsonParser = node.get("sink").traverse(jp.getCodec)
    val sink: Option[MultiContext.UnarySink] =
      mapper.readValue(sinkParser, new TypeReference[Option[MultiContext.UnarySink]]() {})

    val pluginsParser: JsonParser = node.get("plugins").traverse(jp.getCodec)
    val plugins: List[String] = mapper.readValue(pluginsParser, new TypeReference[List[String]]() {})

    // 1. Rebuild the context from the deserialized configuration.
    val multiContext = new MultiContext(configuration)

    // 2. Re-attach the sink, if one was serialized.
    sink match {
      case Some(MultiContext.TextFileSink(url)) =>
        println(s"It's a TextFileSink with url: $url")
        multiContext.withTextFileSink(url)
      case Some(MultiContext.ObjectFileSink(url)) =>
        println(s"It's an ObjectFileSink with url: $url")
        multiContext.withObjectFileSink(url)
      case None =>
        println("No sink defined")
      case _ =>
        println("Unknown sink type")
    }

    // 3. Re-register plugins by matching their serialized class names.
    // TODO: Add all plugins (e.g. Flink, Hadoop, GraphChi once their modules are available here).
    val javaPluginName = Java.basicPlugin().getClass.getName
    val sparkPluginName = Spark.basicPlugin().getClass.getName
    val postgresPluginName = Postgres.plugin().getClass.getName
    val sqlite3PluginName = Sqlite3.plugin().getClass.getName
    // val flinkPluginName = Flink.basicPlugin().getClass.getName

    // Stable-identifier (backtick) patterns match each name against the vals above,
    // replacing the previous `if pluginName == ...` guard chain — behavior is identical.
    plugins.foreach {
      case `javaPluginName`     => multiContext.register(Java.basicPlugin())
      case `sparkPluginName`    => multiContext.register(Spark.basicPlugin())
      case `postgresPluginName` => multiContext.register(Postgres.plugin())
      case `sqlite3PluginName`  => multiContext.register(Sqlite3.plugin())
      // case `flinkPluginName` => multiContext.register(Flink.basicPlugin())
      case _ => println("Unknown plugin detected")
    }

    multiContext
  }
}
3faf7c0b35880576ada4079a1dfa5524f5522691
The text was updated successfully, but these errors were encountered:
Hey @zkaoudi , I have added two more plugins: Hadoop and GraphChi.
`// Add all plugins
val javaPluginName = Java.basicPlugin().getClass.getName
val sparkPluginName = Spark.basicPlugin().getClass.getName
val postgresPluginName = Postgres.plugin().getClass.getName
val sqlite3PluginName = Sqlite3.plugin().getClass.getName
val flinkPluginName = Flink.basicPlugin().getClass.getName
val hadoopPluginName = Hadoop.basicPlugin().getClass.getName
val graphchiPluginName = GraphChi.basicPlugin().getClass.getName
plugins.foreach {
case pluginName if pluginName == javaPluginName => multiContext.register(Java.basicPlugin())
case pluginName if pluginName == sparkPluginName => multiContext.register(Spark.basicPlugin())
case pluginName if pluginName == postgresPluginName => multiContext.register(Postgres.plugin())
case pluginName if pluginName == sqlite3PluginName => multiContext.register(Sqlite3.plugin())
case pluginName if pluginName == flinkPluginName => multiContext.register(Flink.basicPlugin())
case pluginName if pluginName == hadoopPluginName => multiContext.register(Hadoop.basicPlugin())
case pluginName if pluginName == graphchiPluginName => multiContext.register(GraphChi.basicPlugin())
case _ => println("Unknown plugin detected")
}`
Am I missing something? If that's all I had to do, could you please tell me how to get approval to open the pull request? I am new to open-source contributions. Thank you.
Add all plugins
incubator-wayang/wayang-api/wayang-api-scala-java/src/main/scala/org/apache/wayang/api/serialization/customserializers/MultiContextDeserializer.scala
Line 78 in b67b404
3faf7c0b35880576ada4079a1dfa5524f5522691
The text was updated successfully, but these errors were encountered: