diff --git a/.classpath b/.classpath index d7a6c340..f8ee50e3 100644 --- a/.classpath +++ b/.classpath @@ -1,96 +1,155 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/.gitignore b/.gitignore index a3e8ca82..0c23c664 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,6 @@ /bin/ /.worksheet/ /target/ -/project/ /.idea/ /project/target/ /project/project/ @@ -11,4 +10,5 @@ org.eclipse* .classpath *.iml *.ibc -**/node_modules/* \ No newline at end of file +**/node_modules/* +node_modules/ \ No newline at end of file diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 00000000..58a03e67 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,11 @@ +{ + "configurations": [ + { + "type": "java", + "name": "CodeLens (Launch) - Test", + "request": "launch", + "mainClass": "Java.Test", + "projectName": "MohanLearningGround" + } + ] +} \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 00000000..f1dc2e88 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,4 @@ +{ + "python.linting.pylintEnabled": true, + "python.linting.enabled": true +} \ No newline at end of file diff --git a/Excercise.sc b/Excercise.sc new file mode 100644 index 00000000..a89bbff6 --- /dev/null +++ b/Excercise.sc @@ -0,0 +1,42 @@ +object Excercise { + + def reduce[F[_],A](nums: F[A]): Int = { + ??? + } //> reduce: [F[_], A](nums: F[A])Int + + Some(5).map(_ + 1) //> res0: Option[Int] = Some(6) + + val studentMarks = List(120, 230, 340, 540, 560)//> studentMarks : List[Int] = List(120, 230, 340, 540, 560) + + val revaluationFun: Option[Int => Int] = Some({ mark: Int => + java.lang.Math.ceil(mark * 1.05).toInt + }) //> revaluationFun : Option[Int => Int] = Some(Excercise$$$Lambda$12/1757293506 + //| @28feb3fa) + + //val revaluation = (marks: Seq[Int], fn: Int => Int) => marks.map(fn) + val revaluation = (marks: Seq[Int], fn: Option[Int => Int]) => if (fn.isDefined) marks.map(fn.get) else marks.map(x => x) + //> revaluation : (Seq[Int], Option[Int => Int]) => Seq[Int] = Excercise$$$Lamb + //| da$13/459296537@7823a2f9 + + //val revisedMarks = revaluation(studentMarks, revaluationFun) + + val revisedMarks = revaluation(studentMarks, revaluationFun) + //> revisedMarks : Seq[Int] = List(126, 242, 357, 567, 588) + /** + * class { + * static [ + * } + * var int = 5 + * + * func intVarDecl = {} + * func charVarDecl = {} + * func VarDecl = inv + * + * } + * + * interace { + * } + * + */ + +} \ No newline at end of file diff --git a/FreeMonad.sc b/FreeMonad.sc new file mode 100644 index 00000000..dfd72ef7 --- /dev/null +++ b/FreeMonad.sc @@ -0,0 +1,117 @@ +object FreeMonad { + println("https://www.youtube.com/watch?v=7xSfLPD6tiQ") + println("Pure Functional Database Programming with Fixpoint Types—Rob Norris") + println("https://tpolecat.github.io/presentations/sw2016/slides.html#4") + //A recursive data type for professors and their Ph.D. 
students + object attempt_1 { + case class Prof( + name: String, + year: Int, + students: List[Prof]) + /* + How do we store the auto-generated primary key, Simple INT won't be right way to do it + CREATE TABLE prof ( + id INTEGER IDENTITY, + parent INTEGER NULL, + name VARCHAR NOT NULL, + year INTEGER NOT NULL, + FOREIGN KEY(parent) REFERENCES prof(id) + ) + */ + } + object attempt_2 { + /** + * id: Option[Int] - Represents three possibilities + * 1. Just constructed objected, no ID in DB + * 2. Just retrieved from DB, there is an id + * 3. In some computation, where DB-ID is not important for computation + */ + case class Prof( + id: Option[Int], + name: String, + year: Int, + students: List[Prof]) + /* + CREATE TABLE prof ( + id INTEGER IDENTITY, + parent INTEGER NULL, + name VARCHAR NOT NULL, + year INTEGER NOT NULL, + FOREIGN KEY(parent) REFERENCES prof(id) + ) + */ + } + object attempt_3 { + /** + * id: Let us handled ID separately + * But it creates new problem as we can't keep "students: List[Prof]" since it looses id + * it should be stored as "students: List[(Int, Prof)]" + */ + case class Prof( + name: String, + year: Int, + students: List[Prof]) + + type IdProf = (Int, Prof) + } + object attempt_4 { + /** + * (Int, Prof) - Was not sure if it make sense, alternatively we can avoid to find the Type by parameterizing + */ + case class ProfF[A]( + name: String, + year: Int, + students: List[A]) + + object attempt_4_1 { + //Here there is new problem - class ProfF takes type parameters + type Prof = ProfF[ProfF] + type IdProf = (Int, ProfF[(Int, ProfF)]) + } + object attempt_4_2 { + //Actually it is infinitely recursive + type Prof = ProfF[ProfF[ProfF[ProfF[ProfF]]]] + type IdProf = (Int, ProfF[(Int, ProfF[ProfF[ProfF[ProfF]]])]) + + } + /** Conclusion: Type aliases can't be recursive, but classes can be recursive */ + } + + object attempt_5 { + /** + * (Int, Prof) - Was not sure if it make sense, alternatively we can avoid to find the Type by parameterizing + */ + case class ProfF[A]( + name: String, + year: Int, + students: List[A]) + + object attempt_5_1 { + case class Prof(value: ProfF[Prof]) + case class IdProf(id: Int, prof: ProfF[IdProf]) + } + + object attempt_5_2 { + case class Prof[F[_]](value: F[Prof[F]]) + case class IdProf[F[_]](id: Int, prof: F[IdProf[F]]) + } + + } + + import scalaz._ + //Above types could be generalized further using following + case class Fix[F[_]](unfix: F[Fix[F]]) + case class Cofree[F[_], A](head: A, tail: F[Cofree[F, A]]) + case class Free[F[_], A](head: A \/ F[Free[F, A]]) + + object using_other_combinator { + case class CofreeF[F[_], A, B](head: A, tail: F[B]) + case class FreeF[F[_], A,B](resume: A \/ F[B]) + + //Below won't compile but ? 
means "I don't worry about any type" + //In typed method signature if _ comes, it means "partially applied" + type Cofree[F[_], A] = Fix[CofreeF[F, A, ?]] + type Free[F[_], A] = Fix[FreeF[F, A, ?]] + + } +} \ No newline at end of file diff --git a/KavinPrint.py b/KavinPrint.py new file mode 100644 index 00000000..07ea50b1 --- /dev/null +++ b/KavinPrint.py @@ -0,0 +1 @@ +[print(str(2) + "*" + str(x) + "= " + str(2*x)) for x in range(1,101) ] \ No newline at end of file diff --git a/build.sbt b/build.sbt index f1ebfe6e..5916bfe4 100644 --- a/build.sbt +++ b/build.sbt @@ -2,20 +2,34 @@ name := "MohanLearningGround" version := "1.0" -scalaVersion := "2.12.3" -val akkaVersion = "2.5.4" +scalaVersion := "2.12.6" +val akkaVersion = "2.5.18" +val akkaHttpVersion = "10.1.0" +val tomcatVersion = "9.0.14" //enablePlugins(ScalaJSPlugin) -initialize := { - val _ = initialize.value - val specVersion = sys.props("java.specification.version") - val required = "1.8" - assert(required == specVersion, "Java 1.8 or above required") -} +/* + initialize := { + val _ = initialize.value + val specVersion = sys.props("java.specification.version") + val required = "1.10" + assert(required == specVersion, "Java 1.10.1 or above required") + } +*/ transitiveClassifiers := Seq("sources") +sourceManaged in Compile := file("bin") + + +libraryDependencies ++= Seq( + "org.apache.tomcat" % "tomcat-catalina" % "9.0.14", + "org.apache.tomcat" % "tomcat" % tomcatVersion, + "org.apache.tomcat" % "tomcat-coyote" % tomcatVersion, + "org.apache.tomcat" % "tomcat-jasper" % tomcatVersion +) + libraryDependencies ++= Seq( "com.typesafe.scala-logging" %% "scala-logging" % "3.5.0" withSources(), "org.scalatra" %% "scalatra" % "2.5.+" withSources(), @@ -25,7 +39,14 @@ libraryDependencies ++= Seq( "javax.servlet" % "javax.servlet-api" % "4.0.0" % "provided" withSources(), "org.scalactic" % "scalactic_2.12" % "3.0.2" withSources(), "org.scalatest" % "scalatest_2.12" % "3.0.2" withSources(), - "org.scalacheck" %% "scalacheck" % "1.13.5" withSources() + "org.scalacheck" %% "scalacheck" % "1.13.5" withSources(), + "org.scalaj" %% "scalaj-http" % "2.3.0" withSources() +) + +//java library dependencies +libraryDependencies ++= Seq( + "com.vladsch.flexmark" % "flexmark-all" % "0.32.18" withSources(), + "org.jsoup" % "jsoup" % "1.11.2" withSources() ) logLevel := Level.Warn @@ -38,41 +59,54 @@ libraryDependencies ++= Seq( "org.scalaz" %% "scalaz-core" % "7.2.17" withSources() ) +libraryDependencies ++= Seq( + "com.jcraft" % "jsch" % "0.1.55" +) + +libraryDependencies ++= Seq( + "javax.xml.bind" % "jaxb-api" % "2.3.0", + "javax.annotation" % "javax.annotation-api" % "1.3.2", + "javax.el" % "javax.el-api" % "3.0.0", + "org.glassfish" % "javax.el" % "3.0.0" +) + libraryDependencies ++= Seq( "org.scala-lang" % "scala-reflect" % scalaVersion.value, "org.scala-lang" % "scala-compiler" % scalaVersion.value, "org.scala-lang" % "scala-reflect" % scalaVersion.value, - "org.scala-lang.modules" % "scala-parser-combinators_2.12" % "1.0.6" + "org.scala-lang.modules" % "scala-parser-combinators_2.12" % "1.1.1" ) -libraryDependencies <++= (scalaVersion)(sv => - Seq( +libraryDependencies ++= Seq( + "org.apache.commons" % "commons-text" % "1.3" withSources(), "org.apache.commons" % "commons-io" % "1.3.2" withSources(), "commons-lang" % "commons-lang" % "2.6" withSources(), - "com.fasterxml.jackson.core" % "jackson-core" % "2.9.2" withSources(), - "com.fasterxml.jackson.core" % "jackson-databind" % "2.9.2", + "com.fasterxml.jackson.core" % "jackson-core" % "2.9.3" 
withSources(), + "com.fasterxml.jackson.core" % "jackson-databind" % "2.9.3", "junit" % "junit" % "4.12" - )) +) libraryDependencies ++= Seq( "io.reactivex" %% "rxscala" % "0.26.5" withSources(), "io.reactivex" % "rxswing" % "0.27.0" withSources(), // for Swing Scheduler in suggestions - "org.json4s" %% "json4s-native" % "3.5.2" withSources(), + "org.json4s" %% "json4s-jackson" % "3.5.2" withSources(), "org.scala-lang.modules" %% "scala-swing" % "2.0.0" withSources(), -// "net.databinder.dispatch" % "dispatch-core_2.10" % "0.11.3", "org.scala-lang" % "scala-reflect" % scalaVersion.value, "org.slf4j" % "slf4j-api" % "1.7.5" withSources(), "org.slf4j" % "slf4j-simple" % "1.7.5" withSources(), "com.squareup.retrofit" % "retrofit" % "1.0.0" withSources(), "org.scala-lang.modules" %% "scala-async" % "0.9.6" ) + val depsAkka = Seq( "com.typesafe.akka" %% "akka-actor" % akkaVersion withSources(), "com.typesafe.akka" %% "akka-testkit" % akkaVersion withSources(), - "com.typesafe.akka" %% "akka-persistence" % akkaVersion withSources() + "com.typesafe.akka" %% "akka-persistence" % akkaVersion withSources(), + "com.typesafe.akka" %% "akka-stream" % akkaVersion withSources(), + "com.typesafe.akka" %% "akka-http" % akkaHttpVersion withSources() ) libraryDependencies ++= depsAkka diff --git a/project/build.properties b/project/build.properties new file mode 100644 index 00000000..3ffc1c9f --- /dev/null +++ b/project/build.properties @@ -0,0 +1 @@ +sbt.version=1.2.7 diff --git a/project/plugins.sbt b/project/plugins.sbt index 1e04d47c..7a2dbb33 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,7 +1 @@ -logLevel := Level.Warn - -addSbtPlugin("org.scala-js" % "sbt-scalajs" % "0.6.16") - -addSbtPlugin("io.get-coursier" % "sbt-coursier" % "1.0.0-RC13") - addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "5.2.4") \ No newline at end of file diff --git a/readme.txt b/readme.txt index ded9bc59..7e46636f 100644 --- a/readme.txt +++ b/readme.txt @@ -1,5 +1,5 @@ set JAVA_OPTS=-Dhttp.proxySet=true -Dhttp.proxyHost=proxy.server.com -Dhttp.proxyPort=8080 -java -Xmx1024M -XX:MaxPermSize=1024M -Dsbt.log.noformat=true -Dsbt.repository.config=project/sbt.repositories -Dsbt.override.build.repos=true -jar build/sbt-launch.jar compile package publish +java -Xmx1G -Dsbt.log.noformat=true -Dsbt.repository.config=project/sbt.repositories -Dsbt.override.build.repos=true -jar build/sbt-launch.jar compile package publish ---- diff --git a/src/main/go/parse_xml.go b/src/main/go/parse_xml.go new file mode 100644 index 00000000..ce37dbe0 --- /dev/null +++ b/src/main/go/parse_xml.go @@ -0,0 +1,51 @@ +package main + +/* https://tutorialedge.net/golang/parsing-xml-with-golang/ */ + +import ( + "fmt" + "io/ioutil" + "os" + "encoding/xml" +) + +type Users struct { + XMLName xml.Name `xml:"users"` + Users []User `xml:"user"` +} + +type User struct { + XMLName xml.Name `xml:"user"` + Type string `xml:"type,attr"` + Name string `xml:"name"` + Social Social `xml:"social"` +} + +type Social struct { + XMLName xml.Name `xml:"social"` + Facebook string `xml:"facebook"` + Twitter string `xml:"twitter"` + Youtube string `xml:"youtube"` +} + +func main() { + xmlFile, err := os.Open("users.xml") + if err != nil { + fmt.Println(err) + } + + fmt.Println("Successfully Opened users.xml"); + defer xmlFile.Close(); + + byteValue, _ := ioutil.ReadAll(xmlFile) + + var users Users + xml.Unmarshal(byteValue, &users) + + for i := 0; i < len(users.Users); i++ { + fmt.Println("User Type: " + users.Users[i].Type) + fmt.Println("User 
Name: " + users.Users[i].Name) + fmt.Println("Facebook Url: " + users.Users[i].Social.Facebook) + } + +} \ No newline at end of file diff --git a/src/main/go/users.xml b/src/main/go/users.xml new file mode 100644 index 00000000..fe1eb5b5 --- /dev/null +++ b/src/main/go/users.xml @@ -0,0 +1,19 @@ + + + + Elliot + + https://facebook.com + https://twitter.com + https://youtube.com + + + + Fraser + + https://facebook.com + https://twitter.com + https://youtube.com + + + \ No newline at end of file diff --git a/src/main/java/Test.java b/src/main/java/Test.java new file mode 100644 index 00000000..a0654072 --- /dev/null +++ b/src/main/java/Test.java @@ -0,0 +1,2 @@ +public class Test { +} diff --git a/src/main/java/ground/java/UserInfo.java b/src/main/java/ground/java/UserInfo.java new file mode 100644 index 00000000..66dcba36 --- /dev/null +++ b/src/main/java/ground/java/UserInfo.java @@ -0,0 +1,34 @@ +package ground.java; + +public class UserInfo implements com.jcraft.jsch.UserInfo { + + @Override + public String getPassphrase() { + return null; + } + + @Override + public String getPassword() { + return "test"; + } + + @Override + public boolean promptPassword(String message) { + return false; + } + + @Override + public boolean promptPassphrase(String message) { + return false; + } + + @Override + public boolean promptYesNo(String message) { + return false; + } + + @Override + public void showMessage(String message) { + + } +} diff --git a/src/main/java/ground/java/rest/client/RestClient.java b/src/main/java/ground/java/rest/client/RestClient.java new file mode 100644 index 00000000..b44c74e1 --- /dev/null +++ b/src/main/java/ground/java/rest/client/RestClient.java @@ -0,0 +1,33 @@ +package ground.java.rest.client; + +import java.net.URI; +import java.net.http.HttpClient; +import java.net.http.HttpClient.Redirect; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.net.http.HttpResponse.BodyHandler; +import java.net.http.HttpResponse.BodySubscribers; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.time.Duration; + +public class RestClient { + public static void main(String args[]) throws InterruptedException { + var userName = "mohanmca"; + var user = "https://api.github.com/users/" + userName; + + HttpRequest request = HttpRequest.newBuilder().uri(URI.create(user)).build(); + HttpClient client = HttpClient.newBuilder().followRedirects(Redirect.ALWAYS) + .connectTimeout(Duration.ofSeconds(10)).build(); + + BodyHandler bodyHandler = (rspInfo) -> rspInfo.statusCode() == 200 + ? 
BodySubscribers.ofFile(Paths.get("c:/tmp/" + userName + ".json")) + : BodySubscribers.replacing(Paths.get("c:/tmp/" + userName + ".json")); + + //client.sendAsync(request, bodyHandler); + //client.sendAsync(request, System.out::println); + + client.sendAsync(request, bodyHandler).thenApply(HttpResponse::body).thenAccept(System.out::println); + + } +} diff --git a/src/main/java/ground/java/ssh.java b/src/main/java/ground/java/ssh.java new file mode 100644 index 00000000..008ee243 --- /dev/null +++ b/src/main/java/ground/java/ssh.java @@ -0,0 +1,41 @@ +package ground.java; + +import com.jcraft.jsch.*; + +import java.io.InputStream; + +public class ssh { + public static void main(String[] args) throws Exception { + JSch jsch=new JSch(); + Session session=jsch.getSession(user, host, 22); + UserInfo ui=new UserInfo(); + session.setUserInfo(ui); + session.connect(); + Channel channel=session.openChannel("exec"); + ((ChannelExec)channel).setCommand(command); + channel.setInputStream(null); + ((ChannelExec)channel).setErrStream(System.err); + + InputStream in=channel.getInputStream(); + + channel.connect(); + + + byte[] tmp=new byte[1024]; + while(true){ + while(in.available()>0){ + int i=in.read(tmp, 0, 1024); + if(i<0)break; + System.out.print(new String(tmp, 0, i)); + } + if(channel.isClosed()){ + if(in.available()>0) continue; + System.out.println("exit-status: "+channel.getExitStatus()); + break; + } + try{Thread.sleep(1000);}catch(Exception ee){} + } + channel.disconnect(); + session.disconnect(); + } +} diff --git a/src/main/java/ground/learning/java/coffee/CoffeScriptOnJdk.java b/src/main/java/ground/learning/java/coffee/CoffeScriptOnJdk.java index f19eaca5..3281fbe5 100644 --- a/src/main/java/ground/learning/java/coffee/CoffeScriptOnJdk.java +++ b/src/main/java/ground/learning/java/coffee/CoffeScriptOnJdk.java @@ -31,8 +31,8 @@ public static ScriptEngine getLoadedEngine() { InputStream jqueryStream = CoffeScriptOnJdk.class.getClass() .getResourceAsStream("/js/jquery-2.1.3.js"); - SequenceInputStream is = new SequenceInputStream(coffeeStream, - jqueryStream); +// SequenceInputStream is = new SequenceInputStream(coffeeStream, +// jqueryStream); engine.eval(new InputStreamReader(coffeeStream)); } catch (ScriptException e) { throw new RuntimeException(e); diff --git a/src/main/java/ground/learning/java/text/CosineText.java b/src/main/java/ground/learning/java/text/CosineText.java new file mode 100644 index 00000000..b3bbd8c8 --- /dev/null +++ b/src/main/java/ground/learning/java/text/CosineText.java @@ -0,0 +1,27 @@ +package ground.learning.java.text; + +import java.util.Arrays; +import java.util.Map; +import java.util.function.Function; + +import org.apache.commons.text.similarity.CosineSimilarity; + +import static java.util.stream.Collectors.*; + +public class CosineText { + + static String record = "The package descriptions in the JavaDoc give an overview of the available features and various project reports are provided."; + static String duplicate_record = "The descriptions of the package in the JavaDoc give an overview of features and various project reports are provided."; + + public static void main(String[] args) { + System.out.println("starting"); + Map r1 = Arrays.stream(record.split(" ")).collect(groupingBy(Function.identity(), summingInt(x -> 1))); + Map r2 = Arrays.stream(duplicate_record.split(" ")).collect(groupingBy(Function.identity(), summingInt(x -> 1))); + Map t1 = (Map)(Map) r1; + Map t2 = (Map)(Map) r2; + CosineSimilarity similarity = new CosineSimilarity(); + Object t 
= similarity.cosineSimilarity(t1, t2); + System.out.println(t); + } + +} diff --git a/src/main/java/launch/StartTomcatServer.java b/src/main/java/launch/StartTomcatServer.java new file mode 100644 index 00000000..f448c174 --- /dev/null +++ b/src/main/java/launch/StartTomcatServer.java @@ -0,0 +1,57 @@ +package launch; + +import java.io.File; +import java.io.IOException; +import java.io.PrintWriter; +import java.util.Optional; + +import javax.servlet.ServletException; +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.apache.catalina.startup.Tomcat; + +public class StartTomcatServer { + + public static void main(String[] args) throws Exception { + String contextPath = "/"; + String webappDirLocation = "src/main/webapp/"; + String webappDirAbsolutePath = new File(webappDirLocation).getAbsolutePath(); + + //The port that we should run on can be set into an environment variable + //Look for that variable and default to 8080 if it isn't there. + Optional port = Optional.ofNullable(System.getenv("PORT")); + + try { + Tomcat tomcat = new Tomcat(); + tomcat.setPort(Integer.valueOf(port.orElse("80"))); + tomcat.addWebapp(contextPath, webappDirAbsolutePath); + tomcat.start(); + System.out.println("TOMCAT SERVER STARTED >>>>>>>>>>>>>>>"); + tomcat.getServer().await(); + } catch (Exception exp) { + exp.printStackTrace(); + System.exit(100); + } + } + + public static void addDebugServlet(Tomcat tomcat) { + + @SuppressWarnings("serial") + HttpServlet servlet = new HttpServlet() { + @Override + protected void doGet(HttpServletRequest req, HttpServletResponse resp) + throws ServletException, IOException { + PrintWriter writer = resp.getWriter(); + + writer.println("Welcome"); + writer.println("

<html><body>Have a Great Day!</body></html>

"); + writer.println(""); + } + }; + + tomcat.addServlet("/go", "debugServlet", servlet); + } + +} diff --git a/src/main/java/oms/Order.java b/src/main/java/oms/Order.java new file mode 100644 index 00000000..75c644d3 --- /dev/null +++ b/src/main/java/oms/Order.java @@ -0,0 +1,98 @@ +package oms; + +import java.util.Optional; +import java.util.Random; + +public class Order { + // dummy for + static Random r = new java.util.Random(System.currentTimeMillis()); + private long orderId; + private int version; + private int price; + private String userId; + private OrderType type; + + private Order() { + } + + public int getPrice() { + return price; + } + + public void setPrice(int price) { + this.price = price; + } + + public long getOrderId() { + return orderId; + } + + public void setOrderId(long orderId) { + this.orderId = orderId; + } + + public int getVersion() { + return version; + } + + public void setVersion(int version) { + this.version = version; + } + + public String getUserId() { + return userId; + } + + public void setUserId(String userId) { + this.userId = userId; + } + + public OrderType getType() { + return type; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + (int) (orderId ^ (orderId >>> 32)); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Order other = (Order) obj; + if (orderId != other.orderId) + return false; + return true; + } + + public void setType(OrderType type) { + this.type = type; + } + + public static Order create(Optional modelOrder) { + Order o = new Order(); + if (modelOrder.isPresent()) { + o.setUserId(modelOrder.get().getUserId()); + o.setOrderId(modelOrder.get().getOrderId()); + o.setPrice(modelOrder.get().getPrice()); + o.setVersion(modelOrder.get().getVersion() + 1); + o.setType(modelOrder.get().getType()); + } else { + o.setUserId("userId"); + o.setOrderId(System.currentTimeMillis()); + o.setPrice(r.nextInt(5)); + o.setType(OrderType.SELL); + o.setVersion(1); + } + return o; + } + +} diff --git a/src/main/java/oms/OrderProcessor.java b/src/main/java/oms/OrderProcessor.java new file mode 100644 index 00000000..259b8352 --- /dev/null +++ b/src/main/java/oms/OrderProcessor.java @@ -0,0 +1,77 @@ +package oms; + +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentSkipListSet; +import java.util.logging.Logger; + +/** + * Best Buy Order - Highest bid price + * ---- + * Best Sell Order - Lowest asking price + * + * @author mohan + */ +public class OrderProcessor { + + private static Logger logger = Logger.getLogger("com.wombat.nose"); + + private ConcurrentSkipListSet bestSell = new ConcurrentSkipListSet(new BestOrder().reversed()); + private ConcurrentSkipListSet bestBuy = new ConcurrentSkipListSet(new BestOrder()); + private Map orders = new ConcurrentHashMap(); + private static OrderProcessor _instance = new OrderProcessor(); + + private OrderProcessor() { + } + + public static OrderProcessor getInstance() { + return _instance; + } + + public void clear() { + bestSell.clear(); + bestBuy.clear(); + orders.clear(); + } + + public void processOrder(Order order) { + Order oldOrder = orders.put(order.getOrderId(), order); + if (oldOrder == null || !oldOrder.equals(order)) { + logger.finer("New order inserted! 
- oldOrder " + oldOrder + ", new order" + order.toString()); + } else { + logger.finer("Existing order updated!" + order); + } + trigger(order); + } + + private void trigger(Order order) { + if (order.getType() == OrderType.BUY) { + bestBuy.add(order); + } else { + bestSell.add(order); + } + } + + static class BestOrder implements java.util.Comparator { + @Override + public int compare(Order o1, Order o2) { + return o1.getPrice() - o2.getPrice(); + } + } + + public int traderBlotterSize() { + return bestSell.size() + bestBuy.size(); + } + + public Order bestOrder(OrderType type) { + if (type == OrderType.BUY) { + return bestBuy.last(); + } + return bestSell.last(); + } + + public static void main(String args[]) { + + } + +} diff --git a/src/main/java/oms/OrderType.java b/src/main/java/oms/OrderType.java new file mode 100644 index 00000000..23c85508 --- /dev/null +++ b/src/main/java/oms/OrderType.java @@ -0,0 +1,5 @@ +package oms; + +public enum OrderType { + BUY, SELL; +} diff --git a/src/main/js/.babelrc b/src/main/js/.babelrc new file mode 100644 index 00000000..df88952e --- /dev/null +++ b/src/main/js/.babelrc @@ -0,0 +1,3 @@ +{ + "presets": ["env"] +} \ No newline at end of file diff --git a/src/main/js/.bablerc b/src/main/js/.bablerc deleted file mode 100644 index dc1bc4f9..00000000 --- a/src/main/js/.bablerc +++ /dev/null @@ -1,3 +0,0 @@ -{ - "presets": ["es2015"] -} \ No newline at end of file diff --git a/src/main/js/.vscode/launch.json b/src/main/js/.vscode/launch.json index 2c0203be..a053d234 100644 --- a/src/main/js/.vscode/launch.json +++ b/src/main/js/.vscode/launch.json @@ -1,12 +1,14 @@ { - // Use IntelliSense to learn about possible Node.js debug attributes. + // Use IntelliSense to learn about possible attributes. // Hover to view descriptions of existing attributes. 
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 "version": "0.2.0", - "configurations": [{ - "type": "node", - "request": "launch", - "name": "Launch Program", - "program": "${workspaceRoot}\\Canvas.js" - }] + "configurations": [ + { + "type": "node", + "request": "launch", + "name": "Launch Program", + "program": "${file}" + } + ] } \ No newline at end of file diff --git a/src/main/js/SampleSession.js b/src/main/js/SampleSession.js new file mode 100644 index 00000000..0b7b0c54 --- /dev/null +++ b/src/main/js/SampleSession.js @@ -0,0 +1,51 @@ +let data = [ + { + "type": "employee", + "name": "Mohan", "phone": "2932382", "bob": 24 + }, + { + "type": "employee", + "name": "Mohan", "phone": "2932382", "age": 24 + }, + { + "type": "employee", + "name": "Mohan", "title": "President", "age": 24 + }, + { + "type": "customer", + "name": "Mohan", "title": "President", "age": 24 + }, + { + "type": "customer", + "name": "Mohan", "age": 24 + } +] + + +function groupBy(list, keyGetter) { + const map = new Map(); + list.forEach((item) => { + const key = keyGetter(item); + const collection = map.get(key); + if (!collection) { + map.set(key, [item]); + } else { + collection.push(item); + } + }); + return map; +} + +let dataByIndex = groupBy(data, data => data.type); +const onlyUnique = (value, index, self) => self.indexOf(value) === index +let reducer = (accu, data) => {Array.prototype.push.apply(accu, Object.keys(data)); return accu } + +let parseTypeVsColumns = function (data) { + let dataByIndex = groupBy(data, data => data.type); + let _t = {} + let _data = [...dataByIndex.keys()] + _data.forEach(type => _t[type] = dataByIndex.get(type).reduce(reducer,[]).filter(onlyUnique) ) + return _t; +} + +console.log(parseTypeVsColumns(data)) \ No newline at end of file diff --git a/src/main/js/asyncGenerator.js b/src/main/js/asyncGenerator.js new file mode 100644 index 00000000..3f164ed0 --- /dev/null +++ b/src/main/js/asyncGenerator.js @@ -0,0 +1,46 @@ +import fetch from 'node-fetch'; +//import 'whatwg-fetch'; +import http from 'http' + +/** + * Below code works on Chrome, but not in nodejs + */ +async function* streamAsyncIterator(response) { + const reader = response.body.getReader(); + try { + while (true) { + // Read from the stream + const { done, value } = await reader.read(); + // Exit if we're done + if (done) return; + // Else yield the chunk + yield value; + } + } + finally { + reader.releaseLock(); + } +} + +async function example() { + const find = 'J'; + const findCode = find.codePointAt(0); + const response = await fetch('https://html.spec.whatwg.org'); + let bytes = 0; + + for await (const chunk of streamAsyncIterator(response.body)) { + const index = chunk.indexOf(findCode); + + if (index != -1) { + bytes += index; + console.log(`Found ${find} at byte ${bytes}.`); + break; + } + + bytes += chunk.length; + } + + response.body.cancel(); +} + +example(); \ No newline at end of file diff --git a/src/main/js/asyncGenerator2.js b/src/main/js/asyncGenerator2.js new file mode 100644 index 00000000..641c020a --- /dev/null +++ b/src/main/js/asyncGenerator2.js @@ -0,0 +1,187 @@ +'use strict'; + +var _asyncGenerator = function () { function AwaitValue(value) { this.value = value; } function AsyncGenerator(gen) { var front, back; function send(key, arg) { return new Promise(function (resolve, reject) { var request = { key: key, arg: arg, resolve: resolve, reject: reject, next: null }; if (back) { back = back.next = request; } else { front = back = request; resume(key, arg); } 
}); } function resume(key, arg) { try { var result = gen[key](arg); var value = result.value; if (value instanceof AwaitValue) { Promise.resolve(value.value).then(function (arg) { resume("next", arg); }, function (arg) { resume("throw", arg); }); } else { settle(result.done ? "return" : "normal", result.value); } } catch (err) { settle("throw", err); } } function settle(type, value) { switch (type) { case "return": front.resolve({ value: value, done: true }); break; case "throw": front.reject(value); break; default: front.resolve({ value: value, done: false }); break; } front = front.next; if (front) { resume(front.key, front.arg); } else { back = null; } } this._invoke = send; if (typeof gen.return !== "function") { this.return = undefined; } } if (typeof Symbol === "function" && Symbol.asyncIterator) { AsyncGenerator.prototype[Symbol.asyncIterator] = function () { return this; }; } AsyncGenerator.prototype.next = function (arg) { return this._invoke("next", arg); }; AsyncGenerator.prototype.throw = function (arg) { return this._invoke("throw", arg); }; AsyncGenerator.prototype.return = function (arg) { return this._invoke("return", arg); }; return { wrap: function wrap(fn) { return function () { return new AsyncGenerator(fn.apply(this, arguments)); }; }, await: function _await(value) { return new AwaitValue(value); } }; }(); //import fetch from 'node-fetch'; + + +var streamAsyncIterator = function () { + var _ref = _asyncGenerator.wrap( /*#__PURE__*/regeneratorRuntime.mark(function _callee(response) { + var reader, _ref2, done, value; + + return regeneratorRuntime.wrap(function _callee$(_context) { + while (1) { + switch (_context.prev = _context.next) { + case 0: + reader = response.body.getReader(); + _context.prev = 1; + + case 2: + if (!true) { + _context.next = 14; + break; + } + + _context.next = 5; + return _asyncGenerator.await(reader.read()); + + case 5: + _ref2 = _context.sent; + done = _ref2.done; + value = _ref2.value; + + if (!done) { + _context.next = 10; + break; + } + + return _context.abrupt('return'); + + case 10: + _context.next = 12; + return value; + + case 12: + _context.next = 2; + break; + + case 14: + _context.prev = 14; + + reader.releaseLock(); + return _context.finish(14); + + case 17: + case 'end': + return _context.stop(); + } + } + }, _callee, this, [[1,, 14, 17]]); + })); + + return function streamAsyncIterator(_x) { + return _ref.apply(this, arguments); + }; +}(); + +var example = function () { + var _ref3 = _asyncToGenerator( /*#__PURE__*/regeneratorRuntime.mark(function _callee2() { + var find, findCode, response, bytes, _iteratorNormalCompletion, _didIteratorError, _iteratorError, _iterator, _step, _value, chunk, index; + + return regeneratorRuntime.wrap(function _callee2$(_context2) { + while (1) { + switch (_context2.prev = _context2.next) { + case 0: + find = 'J'; + findCode = find.codePointAt(0); + _context2.next = 4; + return fetch('https://html.spec.whatwg.org'); + + case 4: + response = _context2.sent; + bytes = 0; + _iteratorNormalCompletion = true; + _didIteratorError = false; + _iteratorError = undefined; + _context2.prev = 9; + _iterator = _asyncIterator(streamAsyncIterator(response.body)); + + case 11: + _context2.next = 13; + return _iterator.next(); + + case 13: + _step = _context2.sent; + _iteratorNormalCompletion = _step.done; + _context2.next = 17; + return _step.value; + + case 17: + _value = _context2.sent; + + if (_iteratorNormalCompletion) { + _context2.next = 29; + break; + } + + chunk = _value; + index = chunk.indexOf(findCode); 
+ + if (!(index != -1)) { + _context2.next = 25; + break; + } + + bytes += index; + console.log('Found ' + find + ' at byte ' + bytes + '.'); + return _context2.abrupt('break', 29); + + case 25: + + bytes += chunk.length; + + case 26: + _iteratorNormalCompletion = true; + _context2.next = 11; + break; + + case 29: + _context2.next = 35; + break; + + case 31: + _context2.prev = 31; + _context2.t0 = _context2['catch'](9); + _didIteratorError = true; + _iteratorError = _context2.t0; + + case 35: + _context2.prev = 35; + _context2.prev = 36; + + if (!(!_iteratorNormalCompletion && _iterator.return)) { + _context2.next = 40; + break; + } + + _context2.next = 40; + return _iterator.return(); + + case 40: + _context2.prev = 40; + + if (!_didIteratorError) { + _context2.next = 43; + break; + } + + throw _iteratorError; + + case 43: + return _context2.finish(40); + + case 44: + return _context2.finish(35); + + case 45: + + response.body.cancel(); + + case 46: + case 'end': + return _context2.stop(); + } + } + }, _callee2, this, [[9, 31, 35, 45], [36,, 40, 44]]); + })); + + return function example() { + return _ref3.apply(this, arguments); + }; +}(); + +require('whatwg-fetch'); + +function _asyncIterator(iterable) { if (typeof Symbol === "function") { if (Symbol.asyncIterator) { var method = iterable[Symbol.asyncIterator]; if (method != null) return method.call(iterable); } if (Symbol.iterator) { return iterable[Symbol.iterator](); } } throw new TypeError("Object is not async iterable"); } + +function _asyncToGenerator(fn) { return function () { var gen = fn.apply(this, arguments); return new Promise(function (resolve, reject) { function step(key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { return Promise.resolve(value).then(function (value) { step("next", value); }, function (err) { step("throw", err); }); } } return step("next"); }); }; } + +example(); diff --git a/src/main/js/dailyJs.js b/src/main/js/dailyJs.js new file mode 100644 index 00000000..581f8d2b --- /dev/null +++ b/src/main/js/dailyJs.js @@ -0,0 +1,165 @@ +/** + * var table = document.getElementById("ranking") + * console.log(hTableToJson(table)) + */ + +function hTableToJson(table) { + var rows = [...table.rows] + var content = rows.map(row => Array.from(row.cells).map(cell => cell.innerText)) + var headers = content[0] + var result = content.map(record => { var r = {}; record.map( (v,i,a) => r[headers[i]] = v.replace(/^\s+|\s+$/g,'') ); delete r[""]; return r } ) + return JSON.stringify(result, null, 2); +} + +function downloadURI(uri, name) { + var link = document.createElement("a"); + link.download = name; + link.href = uri; + document.body.appendChild(link); + link.click(); + document.body.removeChild(link); + delete link; +} + +function download(url) { + xmlhttp=new XMLHttpRequest() + xmlhttp.open("GET", url, false) + xmlhttp.send() + return xmlhttp.responseText +} + +function parseHtml(string) { + let parser = new DOMParser() + let contentDom = parser.parseFromString(content, "text/xml") + return contentDom; +} + +/** + * @param {*} scriptUrl + * addScript('https://cdnjs.cloudflare.com/ajax/libs/lodash.js/4.5.1/lodash.js') + * _.filter([1,2,3], x => x%2 == 0) + * addScript('https://cdnjs.cloudflare.com/ajax/libs/rxjs/5.4.2/Rx.js') + * Rx.Observable.range(1,3) + * + */ +function addScript(scriptUrl) { + var script = document.createElement('script'); + script.type = 'text/javascript'; + script.src = scriptUrl; + 
document.head.appendChild(script); +} + +/** + +```Javascript +var jsdom = require('jsdom') +jsdom.env({ + url: "http://news.ycombinator.com/", + scripts: ["http://code.jquery.com/jquery.js"], + done: function (err, window) { + var $ = window.$; + console.log("HN Links"); + $("td.title:not(:last) a").each(function() { + console.log(" -", $(this).text()); + }); + } +}); +``` + + + +```Javascript +var packages = [] +var nodes = document.getElementsByClassName("css-truncate-target") +for(var i=0; i< nodes.length;i++){ if(nodes[i].children[0]) packages.push(nodes[i].children[0].text) } +packages.map( _ => "apm install " + _).join("\n") +"apm install " + packages.join(", ") +``` + + +* Copy list of links from google chrome + +```Javascript +const onlyUnique = (value, index, self) => self.indexOf(value) === index +var container = document.evaluate('//*[@id="rso"]/div/div', document, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null) +var results = Array.from(container.singleNodeValue.children) +var resultLinks = results.map(childNode => document.evaluate('div/div/h3/a', childNode, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null)) +resultLinks.map( link => link.singleNodeValue.href) + +var result = resultLinks.map(x => x.singleNodeValue.href).filter(onlyUnique).join("\r\n") +copy(result) +``` + + +* Copy list of links from ycombinator articles - Hacker news +```Javascript +// Navigate to - https://news.ycombinator.com/item?id=15154903 or https://news.ycombinator.com/item?id=16745042 +const container = document.evaluate('//a', document, null, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE , null) +const items = Array.from(Array(container.snapshotLength).keys()) +const _links = items.map(i => container.snapshotItem(i).innerHTML).filter(text => text.indexOf("http")!=-1) +const links = _links.sort().filter((e,i,a) => a.indexOf(e)==i) +console.log("Unique links \n" + links.join("\n")) +let map = {} +_links.sort().map(link => link.substring(0,100)).forEach(link => map[link] = (map[link] || 0) + 1) +Object.entries(map).sort(kv => kv[1]).map(kv => kv[1].toString().padEnd(4) + kv[0]).join("\n") + + +console.log(links.join("\n")) +``` + + + +### How to extract youtube motivational speech text + +* https://www.youtube.com/watch?v=I22Lf0xF0UE and skip advertisement +* find "...More" button, and click that button and select "Transcript" +* Select english as language +* press f12 > select console +* paste below lines of code, and get transcript. 
+ + +```Javascript +{ + let transcriptLines = [...document.getElementById("transcript-scrollbox").childNodes] + let text = transcriptLines.map(text => text.childNodes[1].innerHTML) + let result = text.filter(text => text.indexOf("MUSIC") == -1) + console.log(result) +} +``` + + ### Format all the json files in a directory +```Javascript +{ +let files = fs.readdirSync("c:/git") +files.forEach(f => { +let content = fs.readFileSync(fs); +let object = JSON.parse(content); +fs.writeFileSync(f, JSON.stringify(object, null, 2)); +}) +} +``` + + + ```Javascript + { + let transcript = [...document.getElementsByClassName("ytd-transcript-renderer")].filter(element => element.id == "body")[0].innerText + let result = transcript.split("\n").filter(text => text.indexOf(":") == -1).filter(text => text.toLocaleLowerCase().indexOf("music") == -1) + console.log(result.join("\n")) +} + ``` + + + */ + + + + +let dailyJs = { + hTableToJson, + downloadURI, + download, + addScript, + parseHtml +} + +module.exports = dailyJs \ No newline at end of file diff --git a/src/main/js/driver.js b/src/main/js/driver.js new file mode 100644 index 00000000..7901914d --- /dev/null +++ b/src/main/js/driver.js @@ -0,0 +1,81 @@ +/** + * @name screenshots + * + * @desc Snaps a basic screenshot of the full New York Time homepage and saves it a .png file. + * + * @see {@link https://github.com/GoogleChrome/puppeteer/blob/master/docs/api.md#screenshot} + */ + +const fs = require('fs'); +const puppeteer = require('puppeteer'); +const selectRecords = require('./selectRecords'); +const selectPastRecords = require('./selectPastRecords'); + +const timeString = (new Date).toISOString().slice(0,16).replace(/-|:/g,'_') +const outFileName = `rippedRecords_${timeString}.json` +const jsonStream = fs.createWriteStream(outFileName); +let content = [] + +async function run() { + const browser = await puppeteer.launch({ + headless: true, + devtools: false + }) + const page = await browser.newPage() + const resolutions = [{ + width: 1280, + height: 800 + }, { + width: 1920, + height: 1080 + }] + await page.setViewport(resolutions[1]) + await page.on('console', msg => console.log('PAGE LOG:', msg.text())); + await page.addScriptTag({ + path: './helperFunctions.js' + }); + await page.goto('http://www.religareonline.com/market/stock/nse-bse-bulk-deals') + await page.on('console', msg => console.log('PAGE LOG:', msg.text())); + let records = await page.evaluate(selectRecords) + records.result.forEach((record) => content.push(record)) + let numberOfTables = (records.lastPage) + //let numberOfTables = 10 + for (let j = 2; j <= numberOfTables; j++) { + await page.evaluate((txtPageNumber) => { + document.querySelector('#txtPageNumber').value = txtPageNumber; + document.querySelector('a.pgbtn-next').click(); + }, j); + + await page.waitForSelector('#LibTblBulkDeals_BSE_Recent_Listing') + let values = await page.evaluate(selectPastRecords) + values.result.forEach((record) => content.push(record)) + console.log("Size of the content " + content.length) + } + + return content; + +} + +function fixColumn(o) { + Object.keys(o).map(key => { + let temp = o[key]; + delete o[key]; + key = key.replace(/,/g, '_') + .replace(/\n/g, '_').replace(/\//g, 'O').replace(/\s+/g, '_') + .replace(/\(/g, '') + .replace(/\)/g, '') + .replace(/_+/g, '_') + o[key] = temp + }) + return o; +} + +run().then((content) => { + let output = content.map(fixColumn) + let result = { + output + } + jsonStream.write(JSON.stringify(result, '', 2)); + jsonStream.close(); + process.exit() +}) \ 
No newline at end of file diff --git a/src/main/js/dummy.json b/src/main/js/dummy.json new file mode 100644 index 00000000..2925e124 --- /dev/null +++ b/src/main/js/dummy.json @@ -0,0 +1,45 @@ +{ + "_id": "5c570320ac9687ae75fab3d9", + "index": 0, + "guid": "c14dd01a-956e-470c-b747-71c46f73adf9", + "isActive": false, + "balance": "$2,098.77", + "picture": "http://placehold.it/32x32", + "age": 24, + "eyeColor": "brown", + "name": "Key Black", + "gender": "male", + "company": "HONOTRON", + "email": "keyblack@honotron.com", + "phone": "+1 (946) 404-2188", + "address": "905 Euclid Avenue, Lodoga, Illinois, 9336", + "about": "Eiusmod aliqua dolore pariatur minim amet. Nisi nostrud ut officia ad sit. Duis anim consequat laborum eu labore id.\r\n", + "registered": "2018-03-10T06:46:30 -08:00", + "latitude": 57.864271, + "longitude": -57.705847, + "tags": [ + "Lorem", + "dolor", + "eiusmod", + "nulla", + "minim", + "ea", + "reprehenderit" + ], + "friends": [ + { + "id": 0, + "name": "Avis Simpson" + }, + { + "id": 1, + "name": "Fletcher Giles" + }, + { + "id": 2, + "name": "Estella Gregory" + } + ], + "greeting": "Hello, Key Black! You have 8 unread messages.", + "favoriteFruit": "strawberry" +} diff --git a/src/main/js/helperFunctions.js b/src/main/js/helperFunctions.js new file mode 100644 index 00000000..7c3afcfe --- /dev/null +++ b/src/main/js/helperFunctions.js @@ -0,0 +1,20 @@ +window.$x = xPath => document.evaluate( + xPath, + document, + null, + XPathResult.FIRST_ORDERED_NODE_TYPE, + null +).singleNodeValue; + +window.hTableToJson = function (table) { + var rows = Array.from(table.rows) + var content = rows.map(row => Array.from(row.cells).map(cell => cell.innerText)) + var headers = content[0] + var result = content.map(record => { + var r = {}; + record.map((v, i, a) => r[headers[i]] = v.replace(/^\s+|\s+$/g, '')); + delete r[""]; + return r + }) + return result; +} \ No newline at end of file diff --git a/src/main/js/js_threads.js b/src/main/js/js_threads.js new file mode 100644 index 00000000..e953960b --- /dev/null +++ b/src/main/js/js_threads.js @@ -0,0 +1,45 @@ +const fs = require('fs') + +function reverse(content) { + let data = content.split("") + return data.map( (char,index) => data[data.length -index-1]).join('') +} + +let content = fs.readFileSync("dummy.json").toString('utf-8'); + +function getJson() { + return content; +} + +function time(func) { + var t1 = (new Date()).getTime(); + let output = func(); + var t2 = (new Date()).getTime() + console.log("Time taken to complete " + func.name + ". 
" + (t2-t1)); + return output; + +} + +function process() { + console.log("Starting costly operation "); + let json = getJson(); + let tenNumbers = [...Array(99999).keys()] + let input = tenNumbers.map(number => getJson().replace("e","EE") ) + input = input.map(json => reverse(json)) + return input; +} +let boundFun = () => time(process) + +setTimeout(boundFun, 0); +setTimeout(boundFun, 0); +setTimeout(boundFun, 0); +setTimeout(boundFun, 0); +setTimeout(boundFun, 0); +setTimeout(boundFun, 0); +setTimeout(boundFun, 0); +setTimeout(boundFun, 0); +setTimeout(boundFun, 0); +setTimeout(boundFun, 0); + + +console.log("Completed!") \ No newline at end of file diff --git a/src/main/js/package.json b/src/main/js/package.json index c501f8a7..a1a6b1a6 100644 --- a/src/main/js/package.json +++ b/src/main/js/package.json @@ -12,10 +12,29 @@ "run": "babel-node Canvas.js" }, "author": "", + "babel": { + "presets": [ + "env" + ], + "plugins": [ + "transform-async-generator-functions" + ] + }, "license": "ISC", "devDependencies": { - "babel-preset-es2015": "^6.18.0", + "babel-plugin-transform-async-generator-functions": "^6.24.1", + "babel-preset-env": "^1.6.1", + "fetch": "^1.1.0", + "node-fetch": "^2.0.0", + "nodemon": "1.18.9", "npm-watch": "^0.2.0", - "ramda": "^0.22.1" + "ramda": "^0.22.1", + "whatwg-fetch": "^2.0.3" + }, + "dependencies": { + "babel-plugin-transform-async-to-generator": "^6.24.1", + "babel-polyfill": "^6.26.0", + "puppeteer": "^1.3.0", + "csv-write-stream": "^2.0.0" } } diff --git a/src/main/js/puppeteer.js b/src/main/js/puppeteer.js new file mode 100644 index 00000000..2ddae3e5 --- /dev/null +++ b/src/main/js/puppeteer.js @@ -0,0 +1,85 @@ +/** + * @name screenshots + * + * @desc Snaps a basic screenshot of the full New York Time homepage and saves it a .png file. 
+ * + * @see {@link https://github.com/GoogleChrome/puppeteer/blob/master/docs/api.md#screenshot} + */ + +const fs = require('fs'); +const puppeteer = require('puppeteer'); +const jsonStream = fs.createWriteStream('myOutput.json'); + + +let result = (async () => { + const browser = await puppeteer.launch({ + headless: true, + devtools: false + }) + const page = await browser.newPage() + const resolutions = [{ + width: 1280, + height: 800 + }, { + width: 1920, + height: 1080 + }] + await page.setViewport(resolutions[1]) + await page.on('console', msg => console.log('PAGE LOG:', msg.text())); + await page.goto('http://www.religareonline.com/market/stock/nse-bse-bulk-deals') + await page.screenshot({ + path: 'bulk-deals_1.png', + fullPage: true + }) + + await page.evaluate(() => { + window.hTableToJson = function (table) { + var rows = Array.from(table.rows) + var content = rows.map(row => Array.from(row.cells).map(cell => cell.innerText)) + var headers = content[0] + var result = content.map(record => { + var r = {}; + record.map((v, i, a) => r[headers[i]] = v.replace(/^\s+|\s+$/g, '')); + delete r[""]; + return r + }) + return result; + } + + window.writeFile = function(path, data, opts='utf8') { + new Promise((res, rej) => { + fs.writeFile(path, data, opts, (err) => { + if (err) rej(err) + else res() + }) + }) + }; + }) + + await page.on('console', msg => console.log('PAGE LOG:', msg.text())); + const bulkDeals1 = await page.evaluate(() => { + var table = document.getElementById("fnCtrBulkDealsListingLibTblBulkDeals_BSE_Recent_Listing") + return hTableToJson(table) + }) + + await page.type("#txtPageNumber", "2") + await page.click("a.pgbtn-next") + await page.waitForSelector('#fnCtrBulkDealsListingLibTblBulkDeals_BSE_Recent_Listing') + const bulkDeals2 = await page.evaluate(() => { + var table = document.getElementById("fnCtrBulkDealsListingLibTblBulkDeals_BSE_Recent_Listing") + return hTableToJson(table) + }) + + let bulkDeals = await [].concat(bulkDeals2).concat(bulkDeals1); + let content = await JSON.stringify(bulkDeals, '', 2) + + await jsonStream.write(JSON.stringify(content,'',2)) + + return { + content: content, + result: "test" + } +})() + + +result.then((content) => console.log(content)) \ No newline at end of file diff --git a/src/main/js/readme.md b/src/main/js/readme.md new file mode 100644 index 00000000..2327681e --- /dev/null +++ b/src/main/js/readme.md @@ -0,0 +1,2 @@ +* npm install babel-preset-env --save-dev +* \ No newline at end of file diff --git a/src/main/js/recorder.js b/src/main/js/recorder.js new file mode 100644 index 00000000..523fa543 --- /dev/null +++ b/src/main/js/recorder.js @@ -0,0 +1,37 @@ +/** + * Copy below code to register in any SPA application + */ + +export default function recorder(document) { + const t0 = Date.now(); + const events = []; + + const record = ({ x, y, pageX, pageY, screenX, screenY }) => { + let event = { + time: getCurrentElapsedTime(), + x, + y, + pageX, + pageY, + screenX, + screenY + }; + console.log(event); + events.push(event); + }; + + const dump = () => { + console.log(JSON.stringify(events)); + }; + + function getCurrentElapsedTime() { + return Date.now() - t0; + } + + // document.getElementsByTagName("body").onmousemove = record; + // document.getElementsByTagName("body").onclick = dump; + + document.getElementById("root").onmousemove = record; + document.getElementById("root").onclick = dump; + +} diff --git a/src/main/js/replayer.js b/src/main/js/replayer.js new file mode 100644 index 00000000..4f4fc47d --- /dev/null +++ 
b/src/main/js/replayer.js @@ -0,0 +1,10 @@ +const replay = () => { + let events = [] + events.forEach(e => ( + let el = document.elementFromPoint(data[0][1], data[0[2]]) + el.click() + )) + +} + +replay(); \ No newline at end of file diff --git a/src/main/js/saveFile.js b/src/main/js/saveFile.js new file mode 100644 index 00000000..aefe487f --- /dev/null +++ b/src/main/js/saveFile.js @@ -0,0 +1,12 @@ +const fs = require('fs'); + + function writeFile(path, data, opts = 'utf8') { + new Promise((res, rej) => { + fs.writeFile(path, data, opts, (err) => { + if (err) rej(err) + else res() + }) + }) +} + +writeFile("/mohan/content.json", "test") \ No newline at end of file diff --git a/src/main/js/selectPastRecords.js b/src/main/js/selectPastRecords.js new file mode 100644 index 00000000..bb70b149 --- /dev/null +++ b/src/main/js/selectPastRecords.js @@ -0,0 +1,23 @@ + const selectPastRecords = () => { + let table = document.getElementById("LibTblBulkDeals_BSE_Recent_Listing") + let rows = Array.from(table.tBodies[0].rows) + + let [header, ...tail] = Array.from(rows) + let columnHeaders = Array.from(header.cells).map(cell => cell.innerText) + let content = tail.map(row => Array.from(row.cells).map(cell => cell.innerText)) + let result = content.map(record => { + var r = {}; + record.map((v, i, a) => r[columnHeaders[i]] = v.replace(/^\s+|\s+$/g, '')); + delete r[""]; + return r + }) + + /** end-of parsing the table into json */ + console.log("Size of the content ~~" + result.length) + + return { + result + }; +}; + +module.exports = exports = selectPastRecords; \ No newline at end of file diff --git a/src/main/js/selectRecords.js b/src/main/js/selectRecords.js new file mode 100644 index 00000000..0aa5dcca --- /dev/null +++ b/src/main/js/selectRecords.js @@ -0,0 +1,26 @@ + const selectRecords = () => { + let table = document.getElementById("fnCtrBulkDealsListingLibTblBulkDeals_BSE_Recent_Listing") + let lastControl = document.querySelectorAll('#tnt_pagination li.Control')[1] + let lastPage = parseInt(lastControl.previousSibling.textContent.match(/[0-9]+/g)[0]) + debugger; + /** start parsing the table into json */ + let rows = Array.from(table.rows) + let [header, ...tail] = Array.from(table.rows) + let columnHeaders = Array.from(header.cells).map(cell => cell.innerText) + let content = tail.map(row => Array.from(row.cells).map(cell => cell.innerText)) + let result = content.map(record => { + var r = {}; + record.map((v, i, a) => r[columnHeaders[i]] = v.replace(/^\s+|\s+$/g, '')); + delete r[""]; + return r + }) + /** end-of parsing the table into json */ + console.log("Size of the content ~~" + result.length) + + return { + result, + lastPage + }; +}; + +module.exports = exports = selectRecords; \ No newline at end of file diff --git a/src/main/js/selectTable.js b/src/main/js/selectTable.js new file mode 100644 index 00000000..f747b8db --- /dev/null +++ b/src/main/js/selectTable.js @@ -0,0 +1,25 @@ +const selectMovie = () => { + const title = document.querySelectorAll('#content *[property$=itemreviewed]'); + const directBy = document.querySelectorAll('#content *[rel$=directedBy]'); + const initialReleaseDate = document.querySelectorAll( + '#content *[property$=initialReleaseDate]' + ); + const rate = document.querySelectorAll('#content *[property$=average]'); + const votes = document.querySelectorAll('#content *[property$=votes]'); + const recommendations = Array.from( + document.querySelectorAll('.recommendations-bd dt a[href]') + ).map(a => a.getAttribute('href').split('/')[4]); + + return { + 
data: [title, directBy, initialReleaseDate, rate, votes] + .map(doms => + Array.from(doms) + .map(d => d.textContent) + .join(',') + ) + .concat([recommendations.join(', ')]), + recommendations, + }; +}; + +module.exports = exports = selectMovie; diff --git a/src/main/md/.vscode/settings.json b/src/main/md/.vscode/settings.json new file mode 100644 index 00000000..7a73a41b --- /dev/null +++ b/src/main/md/.vscode/settings.json @@ -0,0 +1,2 @@ +{ +} \ No newline at end of file diff --git a/src/main/md/10Minutes.Improvement.md b/src/main/md/10Minutes.Improvement.md new file mode 100644 index 00000000..e50fb706 --- /dev/null +++ b/src/main/md/10Minutes.Improvement.md @@ -0,0 +1,54 @@ +# Technology +* Create 10 flash card +* Recollect 10 flast card +* Find source to create 10 flash card +* Read 100 lines of code +* Read https://github.com/trending +* Find project that has worthy 100 lines of code for next code reading +* Recollect or memorize 10 shortcuts + * Eclipse + * VSCode + * Excel + * Shell +* Quickly read a buzzword technology and details (Kafka Stream, Akka stream, Free Monad) +* Read release notes of latest + * Node.js + * Scala release + * Java Release +* Read the latest documentation + * Akka + * Rambda + * Java 10 + * Effective Java/Javscript bullet points +* Read blog + * Read about some implementation of xyz/kuber-netes/cache in large scale internet companies +* Browse useful slides from slideshare + * https://www.slideshare.net/larsga/nosql-and-einsteins-theory-of-relativity + * https://www.slideshare.net/larsga/deduplication +* Watch 3Blue1Brown Math vidoes +* Watch some youtube tech talk video and take notes + +## Tomy's suggestion +1. Consolidate & internalize your notes (e.g. vocab list) +1. Write something in your gratitude journal or pray to give thanks if you're religious +1. Organize your bookmarks +1. Plank or push up +1. Read your Pocket articles +1. Delete unnecessary photos in your phone (I find it theurapeutic) +1. View nice properties in 99.co (it motivates me!) +1. Watch your youtube/ pluralsight "Watch later" list + +## 10 minutes - Manage energy +* What drains your energy? +* What replenishes your energy? + * Being in nature? reading a good book? +* Make an action plan for energy? + + +# Mind +* Watch video about stoicism +* Watch video about buddhism + +# Reference +# [Architecture](https://github.com/davideuler/architecture.of.internet-product) +# [10 minutes]((https://www.quora.com/What-can-I-do-for-10-minutes-every-day-that-will-change-my-life) \ No newline at end of file diff --git a/src/main/md/Akin'sLaw.md b/src/main/md/Akin'sLaw.md new file mode 100644 index 00000000..84c406a0 --- /dev/null +++ b/src/main/md/Akin'sLaw.md @@ -0,0 +1,94 @@ +1. Engineering is done with numbers. Analysis without numbers is only an opinion. + +2. To design a spacecraft right takes an infinite amount of effort. This is why it's a good idea to design them to operate when some things are wrong . + +3. Design is an iterative process. The necessary number of iterations is one more than the number you have currently done. This is true at any point in time. + +4. Your best design efforts will inevitably wind up being useless in the final design. Learn to live with the disappointment. + +5. (Miller's Law) Three points determine a curve. + +6. (Mar's Law) Everything is linear if plotted log-log with a fat magic marker. + +7. At the start of any design effort, the person who most wants to be team leader is least likely to be capable of it. + +8. 
In nature, the optimum is almost always in the middle somewhere. Distrust assertions that the optimum is at an extreme point. + +9. Not having all the information you need is never a satisfactory excuse for not starting the analysis. + +10. When in doubt, estimate. In an emergency, guess. But be sure to go back and clean up the mess when the real numbers come along. + +11. Sometimes, the fastest way to get to the end is to throw everything out and start over. + +12. There is never a single right solution. There are always multiple wrong ones, though. + +13. Design is based on requirements. There's no justification for designing something one bit "better" than the requirements dictate. + +14. (Edison's Law) "Better" is the enemy of "good". + +15. (Shea's Law) The ability to improve a design occurs primarily at the interfaces. This is also the prime location for screwing it up. + +16. The previous people who did a similar analysis did not have a direct pipeline to the wisdom of the ages. There is therefore no reason to believe their analysis over yours. There is especially no reason to present their analysis as yours. + +17. The fact that an analysis appears in print has no relationship to the likelihood of its being correct. + +18. Past experience is excellent for providing a reality check. Too much reality can doom an otherwise worthwhile design, though. + +19. The odds are greatly against you being immensely smarter than everyone else in the field. If your analysis says your terminal velocity is twice the speed of light, you may have invented warp drive, but the chances are a lot better that you've screwed up. + +20. A bad design with a good presentation is doomed eventually. A good design with a bad presentation is doomed immediately. + +21. (Larrabee's Law) Half of everything you hear in a classroom is crap. Education is figuring out which half is which. + +22. When in doubt, document. (Documentation requirements will reach a maximum shortly after the termination of a program.) + +23. The schedule you develop will seem like a complete work of fiction up until the time your customer fires you for not meeting it. + +24. It's called a "Work Breakdown Structure" because the Work remaining will grow until you have a Breakdown, unless you enforce some Structure on it. + +25. (Bowden's Law) Following a testing failure, it's always possible to refine the analysis to show that you really had negative margins all along. + +26. (Montemerlo's Law) Don't do nuthin' dumb. + +27. (Varsi's Law) Schedules only move in one direction. + +28. (Ranger's Law) There ain't no such thing as a free launch. + +29. (von Tiesenhausen's Law of Program Management) To get an accurate estimate of final program requirements, multiply the initial time estimates by pi, and slide the decimal point on the cost estimates one place to the right. + +30. (von Tiesenhausen's Law of Engineering Design) If you want to have a maximum effect on the design of a new engineering system, learn to draw. Engineers always wind up designing the vehicle to look like the initial artist's concept. + +31. (Mo's Law of Evolutionary Development) You can't get to the moon by climbing successively taller trees. + +32. (Atkin's Law of Demonstrations) When the hardware is working perfectly, the really important visitors don't show up. + +33. (Patton's Law of Program Planning) A good plan violently executed now is better than a perfect plan next week. + +34. (Roosevelt's Law of Task Planning) Do what you can, where you are, with what you have. + +35. 
(de Saint-Exupery's Law of Design) A designer knows that he has achieved perfection not when there is nothing left to add, but when there is nothing left to take away. + +36. Any run-of-the-mill engineer can design something which is elegant. A good engineer designs systems to be efficient. A great engineer designs them to be effective. + +37. (Henshaw's Law) One key to success in a mission is establishing clear lines of blame. + +38. Capabilities drive requirements, regardless of what the systems engineering textbooks say. + +39. Any exploration program which "just happens" to include a new launch vehicle is, de facto, a launch vehicle program. + +39. (alternate formulation) The three keys to keeping a new human space program affordable and on schedule: + 1) No new launch vehicles. + 2) No new launch vehicles. + 3) Whatever you do, don't develop any new launch vehicles. + +40. (McBryan's Law) You can't make it better until you make it work. + +41. There's never enough time to do it right, but somehow, there's always enough time to do it over. + +42. Space is a completely unforgiving environment. If you screw up the engineering, somebody dies (and there's no partial credit because most of the analysis was right...) + + + +# References +* [The canonical set of Akin's Laws](http://spacecraft.ssl.umd.edu/akins_laws.html) +* [Detailed Explanation](http://www.ece.uvic.ca/~elec399/201409/Akin's%20Laws%20of%20Spacecraft%20Design.pdf) diff --git a/src/main/md/Ansible.md b/src/main/md/Ansible.md new file mode 100644 index 00000000..68f3b6ca --- /dev/null +++ b/src/main/md/Ansible.md @@ -0,0 +1,54 @@ +* Some of the application installation, maintenance requires complex task automation +* Ansible is a task execution engine + * Ability to manage remote computers is the power of Ansbile + * Execute tasks on multiple sysems at the same time + * Ansible is written using python + * Ansible scripts are written in YAML + * Ansible require one "Ansible Engine" as a control system + * Use of Microsoft Windows as control machine is not supported +* Requires python 2.6 or later +* On fedora + * sudo dnf search ansible + * sudo dnf install ansible +* Installation +```Bash +which ansible +ansible --version // 2.2.1.0 +/etc/ansible/ansible.cfg +``` + +```Bash +#when no sys-admin rights, use pip, and you can have multiple ansible verion +sudo dnf install python-virtualenv +sudo dnf install gcc openssl-devel +virtualenv ~/ansible +source ~/ansbile/bin/activate +pip search ansbile +pip install ansible +``` +* Inventory + * A set of potential targets where task could be executed + * Collecion of source machines/VM + * Domain could belong to + * Ungrouped, Grouped or SubGroups + ```yaml + server.sampledomain + [web] + server.sampledomain + [web:east] + japan.server.sampledomain + ``` + * Patterns + * Single, Inclusive, Exclusive, Union + * Inventory Variables + * Variable Scopes + * Host, Group, Group of Groups, All Group + * Inventory Sources + * Static + * Dynamic + * All cloud providers + * Dockers, Vagrant, VMWware, VirtualBox + * SSH_CONFIG + * [Inventor Contrib Sources](https://github.com/ansible/ansible/tree/devel/contrib/inventory) + * [Developing dynamic inventory source](http://docs.ansible.com/ansible/latest/dev_guide/developing_inventory.html) + \ No newline at end of file diff --git a/src/main/md/Aws/AdvancedDesignPatternforNoSQL.md b/src/main/md/Aws/AdvancedDesignPatternforNoSQL.md new file mode 100644 index 00000000..1d76577d --- /dev/null +++ b/src/main/md/Aws/AdvancedDesignPatternforNoSQL.md @@ -0,0 
+1,83 @@ +## RDBMS + * Invented in 70's + * Invented when storage was expensive. 250K for 4 MB HDD + * Should know how join works on RDBMS, it is basic + * Features + * Optimized for storage, but CPU has to do lot of work for denormalized form. Join table and denormalize + * Normalized/relational + * Ad hoc queries + * Scale vertically + * Good for OLAP + * ACID transactions are required since we store the related data in different table, We need to update all the relevant table + * Design table and query can be anything + * 1 application service = N table + +## NOSQL + * Technology adoption and hype curve + * If you learn how to model data correctly for noSql, you will get best output + * Features + * Optimized for compute + * DeNormalized/hierarchical + * Instantiated Views + * Scale horizontally + * Built for OLTP at scale + * Every real world application has some soft of relation data model in nature + * ACID is not required since we can store/update all relvant data in same place/table + * Avoid relational design patterns, use one table + * Need to know the access pattern upfront + * Query should be known upfront, and table should be designed based on queries + * More we tune to the access pattern, more tightly coupled to application + * Not much flexible, but very efficient to use at scale + * 1 application service = 1 table + * When the data gets loaded, get even and pre-compute for future queries upfront + * + +## MongoDB aggregation framework is nice, but won't scale + + +## Amazon DynamoDB +* Fully managed NoSql +* Document or Key-Value + * Wide column key value store + * Every document should have one key - that is the partition key + * optional sort key - execute complex range queries - can order the items + * customer id - parition key, order date - sort key +* Support document attribute type +* 4 million transaction for second +* Fast and consistent +* Fine grain access + * Tables + * Documents in Tables + * Attributes in Documents +* Eventual consistent read vs Strong consisten read +* Global secondary index vs Local secondary index + * Local secondary index (Resort the data wihin the paritions - should be same as global partion key as table) + * Global secondary index - Could be new attribute for other aggregation. Example: Warehouse +* Partition Key - Large number of distinct values, Items are uniformly requested and randomly distribtued (Bad: Gender, Status. Good: CustomerId, DeviceId) + + +# Scenario design +* +Lack of CompositeKey +```SQL +Select * from Game where player='Bob' order by Date desc Filter on Status='PENDING' +``` +CompositeKey works better for large data +```SQL +Select * from Game where player='Bob' Filter on Status begins with 'PENDING' +``` + +Transaction +```SQL +COPY Item.V0 -> Item1.v3 if Item.v3 = null +Update Item1.v3 SET Attr1 +=1 +Update Item1.v3 SET Attr2 =.. +Update Item1.v3 SET Attr3 =.. 
+COPY Item.V3 -> Item1.v0 SET CurVer = 3 +``` + +# Reference +* (AWS re:Invent 2018: Amazon DynamoDB Deep Dive: Advanced Design Patterns for DynamoDB (DAT401))[https://www.youtube.com/watch?time_continue=33&v=HaEPXoXVf2k] +* (Rick Houlihan - Principal Technologist)[https://www.quora.com/profile/Rick-Houlihan] +* [amazon-dynamodb-deep-dive-advanced-design-patterns-for-dynamodb](https://www.slideshare.net/AmazonWebServices/amazon-dynamodb-deep-dive-advanced-design-patterns-for-dynamodb-dat401-aws-reinvent-2018pdf) +* [Best Practices NOSql](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/best-practices.html) \ No newline at end of file diff --git a/src/main/md/BetterDeveloper/Essential_tools.md b/src/main/md/BetterDeveloper/Essential_tools.md new file mode 100644 index 00000000..e1b6c6c2 --- /dev/null +++ b/src/main/md/BetterDeveloper/Essential_tools.md @@ -0,0 +1,7 @@ +* git +* shell +* python - modern day shell +* pomodorro +* sql +* unit-test & mocking +* \ No newline at end of file diff --git a/src/main/md/BetterDeveloper/Programmer_Following.md b/src/main/md/BetterDeveloper/Programmer_Following.md new file mode 100644 index 00000000..263f6aeb --- /dev/null +++ b/src/main/md/BetterDeveloper/Programmer_Following.md @@ -0,0 +1,35 @@ +## Important steps to follow + +### Scala +* Paweł Szulc + * [Going bananas with recursion schemes for fixed point data types - Paweł Szulc](https://www.youtube.com/watch?v=IlvJnkWH6CA) +* Ratan Sebastian + * [Introduction to Recursion Schemes](https://www.youtube.com/watch?v=6m0J_XllHqQ) +* [Rob Norris](http://tpolecat.github.io/presos.html) + * [Pure Functional Database Programming with Fixpoint Types—Rob Norris](https://www.youtube.com/watch?v=7xSfLPD6tiQ) + * http://tpolecat.github.io/presentations/sw2016/slides.html#1 +* [Dave Gurnell](http://davegurnell.com/]) + *[The Type Astronaut's Guide to Shapeless—Dave Gurnell](https://www.youtube.com/watch?v=Zt6LjUnOcFQ) +* [John A. 
De Goes](http://degoes.net/) + * https://gist.github.com/jdegoes +* Daniel Spiewak + * [Blog](http://www.codecommit.com/blog/) + * [High Wizardry in the Land of Scala](https://vimeo.com/28793245) + * [Extreme Cleverness: Functional Data Structures in Scala](https://vimeo.com/20262239) + * [Daniel Spiewak - Roll your own Shapeless](https://vimeo.com/165837504) + * [Lawful Asynchronous Programming — Daniel Spiewak](https://www.youtube.com/watch?v=B0L91sW3XHw) + * [Free as in Monads by Daniel Spiewak](https://www.youtube.com/watch?v=aKUQUIHRGec) +* Alissa Pajer + * [Alissa Pajer - Products, limits and more!](https://vimeo.com/165852490) + * [Alissa Pajer - Free all the functors](https://vidmoon.co/video/0UWBJFXEwwBBsRH) +* Philip Wadler + * [Category Theory for the Working Hacker](https://www.infoq.com/presentations/category-theory-propositions-principle?utm_source=twitter&utm_medium=link&utm_campaign=calendar) + +# Semantic Web and de-duplication +* Lars Marius Garshol + * https://www.slideshare.net/larsga/nosql-and-einsteins-theory-of-relativity + * + +### Javascript +* [Brian Lonsdorf](https://medium.com/@drboolean) + \ No newline at end of file diff --git a/src/main/md/BetterDeveloper/ProjectX.md b/src/main/md/BetterDeveloper/ProjectX.md new file mode 100644 index 00000000..0f68ead6 --- /dev/null +++ b/src/main/md/BetterDeveloper/ProjectX.md @@ -0,0 +1,32 @@ +# Project X - Caliber or Google/Amazon developer + +* Pickup Algorithm and Datastrcuture book +* Cracking the code interview book +* System design book +* HackerRank - practice + + +# Alogirthm Courses + +* [Graph Theory and Graph Algorithms](https://nptel.ac.in/courses/128/106/128106008/) +* [Advanced Graph Theory (Video)](https://nptel.ac.in/courses/106/104/106104170/) +* [On Graph Theory and Graph Algorithms](https://nptel.ac.in/courses/128/106/128106001/) +* [Programming, Data Structures and Algorithms using Python](https://nptel.ac.in/courses/106/106/106106145/) +* [Programming and Data structures](https://nptel.ac.in/courses/106/106/106106130/) + + +# Datastrcutre +* https://www.hackerrank.com/domains/data-structures +* https://www.hackerrank.com/domains/algorithms +* https://www.hackerrank.com/interview/interview-preparation-kit + +## Training +* [System Desgin](https://www.educative.io/courses/grokking-the-system-design-interview) +* [Code Interview](Cracking the Coding Interview) +* [System Design Primer](https://github.com/donnemartin/system-design-primer) + + +* [Technical Interview Preparation Checklist](https://itnext.io/technical-interview-preparation-checklist-b000125f1535) +* [Tech interview @ Amazon](https://www.byte-by-byte.com/amazon-interview/) + + diff --git a/src/main/md/BetterDeveloper/SelfImrpovementAreas.md b/src/main/md/BetterDeveloper/SelfImrpovementAreas.md new file mode 100644 index 00000000..f0d979ec --- /dev/null +++ b/src/main/md/BetterDeveloper/SelfImrpovementAreas.md @@ -0,0 +1,36 @@ +# Git + * Read 50 git SO questions and familiarize + * Memorize 50 git commands + * Write a note about code search using git cli + +# Webpack + * Read documentation of webpack + +# Weekly review + * Effective Java + * Effective Javascript + * Typeclass signatures + * Airbnb code signatures + +# Improving Coding Skills + * Reading 2K lines of code each day continiously for a month + +# Javascript + +* Core Javascript development using functional style +* Chrome Dev Tools +* Visual studio code + * Shortcuts + * How to debug using editor + * Usage of Code template (CSS, React and Javascript) + * Code template shortcuts +* 
Basics of JQuery +* Strong knowledge of NPM +* Configuration of Webpack with HMR +* Configuration of Babel project and debugging on VSCode +* Basic knowledge about CSS and design + * I would recommend "The Non-Designer’s Design Book, Fourth Edition" +* Debugging alignment issues (CSS) using chrome +* Javascript code refactoring guidelines +* Javascript unit testing using Mocha, Jasmine +* Strong knowledge of Chrome Headless usage \ No newline at end of file diff --git a/src/main/md/BetterDeveloper/_algorithm_.md b/src/main/md/BetterDeveloper/_algorithm_.md new file mode 100644 index 00000000..86804372 --- /dev/null +++ b/src/main/md/BetterDeveloper/_algorithm_.md @@ -0,0 +1,35 @@ +* To determine that two words are anagrams of each other, you can assign each letter to a unique prime number (a = 2, b = 3, c = 5 etc.), then compute the product of those numbers, and if they're equal then those two words are anagrams. + * https://en.wikipedia.org/wiki/Fundamental_theorem_of_arithmetic +* http://gleamly.com/article/introduction-kademlia-dht-how-it-works +* Bittorrent protocol. The elements of the protocol are all fairly easy to understand, but it's gorgeous to see how elegantly it solved a social problem + * http://www.bittorrent.org/beps/bep_0003.html + * https://wiki.theory.org/index.php/BitTorrentSpecification +* Diffie�Hellman + * https://www.youtube.com/watch?v=YEBfamv-_do + * Learning via paint analogy is great +* Tic-Tac-Toe - traversal + * By indexing the grid using values from a 3x3 magic square[1]. In a magic square, every row, column, and diagonal has the same sum. So checking for a winner was just checking if a player�s three moves added up to 15. Finding a space that would make the computer win, or block the human opponent from winning, was subtracting two moves from 15 and checking if that spot was available. +* Fourier transform + * The FFT algorithm is perhaps the most elegant and useful work of the 20th century. +* The Gilbert-Johnson-Keerthi convex shape intersection algorithm. +* The Gale-Shapley Algorithm to solve the "Stable Marriage" problem. (2012 Nobel Prize in Economic Sciences) +* Fast inverse square root + * https://en.m.wikipedia.org/wiki/Fast_inverse_square_root +* Exponential backoff + * https://aws.amazon.com/blogs/architecture/exponential-backoff-and-jitter/ +* http://www.codersnotes.com/notes/elegance-of-deflate/ +* Dijkstra's algorithm for finding the shortest path between two nodes in a graph. 
+* K - means clustering (+5) +* https://en.wikipedia.org/wiki/Fisher-Yates_shuffle +* Bloom filters (+5) +* https://en.wikipedia.org/wiki/Duff%27s_device +* A* - Algorithm + * http://theory.stanford.edu/~amitp/GameProgramming/AStarComparison.html +* Quickselect - https://en.m.wikipedia.org/wiki/Quickselect +* RAFT - +* Hyper Log Log +* Hash Cash - https://bitcoinmagazine.com/articles/genesis-files-hashcash-or-how-adam-back-designed-bitcoins-motor-block/ +* [Fisher–Yates - Random list shuffle algorithm](https://en.wikipedia.org/wiki/Fisher%E2%80%93Yates_shuffle) + +# References +* https://news.ycombinator.com/item?id=18236396 \ No newline at end of file diff --git a/src/main/md/BetterDeveloper/aws_study_plan.md b/src/main/md/BetterDeveloper/aws_study_plan.md new file mode 100644 index 00000000..ef655dca --- /dev/null +++ b/src/main/md/BetterDeveloper/aws_study_plan.md @@ -0,0 +1,9 @@ +* [Agasthi Kothurkar - AWS CSA 2017 Study Guide](https://github.com/agasthik/aws-csa-2017/blob/master/README.md) +* [NigelEarle/AWS-CSA-Notes-2018](https://github.com/NigelEarle/AWS-CSA-Notes-2018) +* [Leonardofed Notes](https://gist.github.com/leonardofed/bbf6459ad154ad5215d354f3825435dc#file-readme-md) +* [mransbro/aws-developer-notes](https://github.com/mransbro/aws-developer-notes/blob/master/README.md) +* [AWS Certified Solutions Architect – Associate Study Notes](http://mistwire.com/2016/05/aws-certified-solutions-architect-associate-study-notes/) +* [nmrony/AWS-CSA-A-Notes.md](https://gist.github.com/nmrony/173ef44bc2655266b005fed8b21f4b4d#file-aws-csa-a-notes-md) +* [vaquarkhan/Aws-Solution-Architect-Notes](https://github.com/vaquarkhan/Aws-Solution-Architect-Notes) +* [vioan/aws-certified-solutions-architect-associate-study-notes](https://github.com/vioan/aws-certified-solutions-architect-associate-study-notes/blob/master/README.md) +* [dboyd13/aws-solution-architect-associate-study-notes](https://gist.github.com/dboyd13/2c9ce40e8f3885dffc913ccc3aabeb85#file-aws-solution-architect-associate-study-notes) \ No newline at end of file diff --git a/src/main/md/BetterDeveloper/code_review.md b/src/main/md/BetterDeveloper/code_review.md new file mode 100644 index 00000000..430f0b34 --- /dev/null +++ b/src/main/md/BetterDeveloper/code_review.md @@ -0,0 +1,19 @@ +# General philosophy +* Code review should assume that code works, but is it maintainable? +* Software should be maintainable, and readable, clear, conformant code is necessary for future maintinability. +* Explain why piece code is not good? Never share personal preference like "I don't like this pieice of code" +* Ask why did they code that way? Are there other/better ways to do? +* Rephrase your objection as a question + * How can we do XXX with your change? instead of This change will make XXX impossible. +* + + +## Some guidelines +* You should always write your code as if comments didn't exist. 
+* Never take mondaic type as input (integer, instead of optional) +* Don't flatten the monad + + + +# Reference +* [Better ways to convey code review comments](https://developers.redhat.com/blog/2019/07/08/10-tips-for-reviewing-code-you-dont-like/) \ No newline at end of file diff --git a/src/main/md/BetterDeveloper/developer_vs_architect.md b/src/main/md/BetterDeveloper/developer_vs_architect.md new file mode 100644 index 00000000..65d21c6b --- /dev/null +++ b/src/main/md/BetterDeveloper/developer_vs_architect.md @@ -0,0 +1,90 @@ +# Developer + +* Owns the implementation +* Specific to one system and one department +* When something goes wrong in the system, we know who built + +# Architect + +* Define strategic IT solution to meet the strategic business vision +* Define and maintain IT system and its integration design +* Define and drive the technical vision for the integration and efficiency of IT assets +* Create and review documentation and process regarding recurring issues, new standard operating procedures +* Enterprise-wide planning, communication and implementation +* Maintains strategic design and roadmap +* Develops technology standards and strategies +* Doesn't own any system/application specific failure +* Blueprints help architects ensure that what they are planning to build will work. Architects and their clients use blueprints to understand what they are going to build before they start building it. + +# Why Architects are required (why not developer alone enough?) + +* Thinking doesn't generate code, but it is important. Writing code without thinking is a recipe for bad code +* Writing (code/literature) is nature's way of letting you know how sloppy your thinking is. +* Blueprints help us think clearly about what we're building. Before writing a piece of code, we should write a blueprint. A blueprint for software is called a specification +* When there is disaster it is required to fly high to assess the damage, similarly planning also requires to fly high and requires to view out-of-the box. +* To design complex systems, the need for formal specs should be as obvious as the need for blueprints of a skyscraper. +* Code is a bad medium for helping to understand code. Architects don't make their blueprints out of bricks. +* The key to understanding complexity is abstraction, which means rising above the code level. +* Thinking doesn't guarantee that we won't make mistakes. But not thinking guarantees that we will. + +# Why Architects should code? + +* Specs are useless because we can't generate code from them. +* Specs does not proove you are right. But running code can easily invalidate your specs. +* In this metaphor code is the equivalent of blueprints, the compiler is the builder, and the working software is the house. Building architects deal in blueprints, and software architects should deal in code. +* We're not designing and building things to last a hundred years. we're iterating and learning as we go based on user feedback +* Abstract it very high level, Architecture Astronauts who fly too high where there are no oxygen +* Try to sell without actual details peer-to-peer (than Napster), Soap (than XML) + * They don't understand that new tool is just rehash of old solution + +# Architect don't need to code + +* Architect requires knowledge of data flows & elegant problem solving! +* Architect can program all day and the project will fail miserably. 
+ * Code reviews, customer/team communication and constant evolutionary reviews of the current state of the project are crucial. + * Coding is good, but it is important to spend time on other things. +* Architect could be absolutely terrible developer, but if architect really good at making different software components fit together to meet a business goal, that is enough. +* Architect can relate well to business folks and developers. This is the essence of being an architect and frankly, most developers entirely lack this skill (they don't see business goal/problem). +* Should architect should start his work shoveling the floor and putting the first bricks, No! not required! +* Should infra/storage/network architect should code? +* If archiect code in spare time using assembly language, would that be sufficient? How about basic? + +# How should Architects function (from developer) + +* Modern Architects should write specification in a formal specification language like TLA+. +* Getting an algorithm right takes thought, which means writing a spec. (binary search was n't written within a day) +* The best language for being simple and precise is math. Writing spec on plain english may not be good enough for complex problem. +* Computer architect is somebody who could build the entire system by themselves given sufficient time. +* Architect is someone who is capable of defining the Vision of the complete system delivering the functional and quality attributes required. +* Architects should have some skin in the game. +* Definitely need to know how the pieces fit together and their individual pros and cons. +* Architects should have a very good understanding of the consequences of their decisions. + * Especially wrt the non-functional attributes of the system such as flexibility, scalability, reliability, fault tolerance etc. + +# How Architects should not function + +* It's bad if architect keep telling other people how to code something specific after they stop coding themselves +* If the architecture is a mess and hard to develop for, architects should pay the price too, and not just developers. + +# Building analgoy with architecture + +* They think tearing down walls is hard but changing code is easy, so blueprints of programs aren't necessary. +* Changing working code is hard � especially if we don't want to introduce bugs. +* In this metaphor code is the equivalent of blueprints, the compiler is the builder, and the working software is the house. Building architects deal in blueprints, and software architects should deal in code. +* Architects could be compared to football coaches. They aren't playing football anymore. Or at least not very often. But know how to score goals. + +# My perception + +* Architects not necessarily need to code, if they coded in the past, and know the business domain well, and take part actively in building system with *skin in the game* +* There are many architects (business architect, domain architect, security architect, solution architect, enterprise architect, application architect), Not everyone needs to code. +* Dedicated application architect without "skin in the game" is counter productive and useless +* Architect who solved similar problem for a given business domain is asset provided he starts with Spec even before others begin their work. +* If we could work with Architect would coded/code, and also acts as an Architect, probablity of delivering world-class system is high + * Assume if "Doug Cutting" helps to build big-data archiecture for a bank. 
+ + +# References + +* https://www.joelonsoftware.com/2001/04/21/dont-let-architecture-astronauts-scare-you/ +* [Leslie LamportLeslie Lamport - Why We Should Build Software Like We Build Houses] (https://www.wired.com/2013/01/code-bugs-programming-why-we-need-specs/) +* [Should architect code?](https://twitter.com/dhh/status/1052958907063263232?s=20) \ No newline at end of file diff --git a/src/main/md/BetterDeveloper/functional_programming.md b/src/main/md/BetterDeveloper/functional_programming.md new file mode 100644 index 00000000..861b0151 --- /dev/null +++ b/src/main/md/BetterDeveloper/functional_programming.md @@ -0,0 +1,36 @@ +* Fixed point + * c is a fixed point of the function f(x) if f(c) = c. + * if c is a fixed point, wherever you pass f(c), you could just pass c. (Stopy applying function) + * Below someCurve has fixed point of 2 + * f(x) => x^2 -x*x + 4 + * ```const someCurve = (n) => n ^2 - (3 * n) + 4``` +* Fixed point - YCombinator + * Ycombinator: ![alt text][ycombinator] + + +```js +const y = function(le) { + return function(f) { + return f(f); + }(function(f) { + return le( + function(x) { return (f(f))(x); } + ); + }); +}; +const makeFact = function(givenFact) { + return function(n) { + if( n < 2 ) return 1; + else return n * givenFact(n-1); + } +}; +const fact = y(makeFact); +console.log('Factorial ' + fact(5)); // Outputs 120 +``` +[ycombinator]: ../img/ycombinator.svg "Ycombinator" + +# References +* [UCombinator] (http://matt.might.net/articles/js-church/) +* [YCombinator] (https://blog.benestudio.co/fixed-point-combinators-in-javascript-c214c15ff2f6) +* [YCombinator] (https://blog.benestudio.co/fixed-point-combinators-in-javascript-c214c15ff2f6) +* [Recursion with Combinators in JavaScript](https://codeburst.io/recursion-with-combinators-injavascript-d797451d054d) \ No newline at end of file diff --git a/src/main/md/good_programmer.md b/src/main/md/BetterDeveloper/good_programmer.md similarity index 67% rename from src/main/md/good_programmer.md rename to src/main/md/BetterDeveloper/good_programmer.md index b583cdfc..0de51f52 100644 --- a/src/main/md/good_programmer.md +++ b/src/main/md/BetterDeveloper/good_programmer.md @@ -1,15 +1,28 @@ -# Traits of a good programmer -* Many other developers decorating or extending their existing solution, Meaning they already solved one or many important issues of the project/product. -* Good developers have a knack of breaking requirements and finding gaps. they are very opinionated. -* They challenge requirements and ensure that they knew what problem they are solving before thinking about library, language and platform -* They are very good with set of tools and its intricacies -* They can work faster and code more lines of program. -* How they act and, more deeply, how they think. -* They Plan ahead - It wasn't raining when Noah built the Ark -* They do their research - Measure once and cut once, They don't reinvent the wheel -* They read error messages and stacktraces -* They read source code, and learn from actual documentation (not from blog or some guide) -* They enjoy solving problem, and would take head on the most complex problem and code themselves -* They are good at more than one programming languages, but exceptionally good in their core programming language, and its eco-system. -* They write (and maintain) programs that other people wouldn't say bad things about. 
-* +# Traits of a good programmer +* Many other developers decorating or extending their existing solution, Meaning they already solved one or many important issues of the project/product. +* Good developers have a knack of breaking requirements and finding gaps. they are very opinionated. +* They challenge requirements and ensure that they knew what problem they are solving before thinking about library, language and platform +* They are very good with set of tools and its intricacies +* They can work faster and code more lines of program. +* How they act and, more deeply, how they think. +* They Plan ahead - It wasn't raining when Noah built the Ark +* They do their research - Measure once and cut once, They don't reinvent the wheel +* They read error messages and stacktraces +* They read source code, and learn from actual documentation (not from blog or some guide) +* They enjoy solving problem, and would take head on the most complex problem and code themselves +* They are good at more than one programming languages, but exceptionally good in their core programming language, and its eco-system. +* They write (and maintain) programs that other people wouldn't say bad things about. + +# Make it order +* 1) Make it work [in the nominal case] +* 2) Make it correct [in the edge cases] +* 3) Make it fast [without breaking the other two] + +# 20 skill to improve for any programmer +* Familiarize and memorize coding standard +* Familiarize and usage of code snippets +* Keyboard shortcuts to some of the often used tools +* Read some of the key open-source frameworks documentation and they aware basics of them +* Read some of the formally used proprietary frameworks and documentation +* Knowing and using basic Unit test-frameworks +* Strong knowledge about SCM, especially git diff --git a/src/main/md/BetterDeveloper/improve_programming.md b/src/main/md/BetterDeveloper/improve_programming.md new file mode 100644 index 00000000..a3d867ff --- /dev/null +++ b/src/main/md/BetterDeveloper/improve_programming.md @@ -0,0 +1,11 @@ +* Working with people far smarter than myself +* Always listening to what others have to say, regardless if they're junior, intermediate, senior or guru. job title doesn't mean anything. 
+* Half of everything you know will be obsolete in 18-24 months +* Read 2K lines of code every day + + +# References +* http://wiki.c2.com/?HowToImproveProgrammingSkills +* http://wiki.c2.com/?SoftwareMasterpiece +* http://wiki.c2.com/?ReadGreatPrograms +* https://www.reddit.com/r/javascript/comments/8frt24/the_secret_to_being_a_top_developer_is_building/?utm_source=reddit-android \ No newline at end of file diff --git a/src/main/md/BetterDeveloper/modern_development_tools_stack.md b/src/main/md/BetterDeveloper/modern_development_tools_stack.md new file mode 100644 index 00000000..d9568766 --- /dev/null +++ b/src/main/md/BetterDeveloper/modern_development_tools_stack.md @@ -0,0 +1,7 @@ +* http://postgrest.org/en/v5.2/ +* https://hasura.io + * https://www.youtube.com/channel/UCZo1ciR8pZvdD3Wxp9aSNhQ/videos +* Generat REST API applications - https://github.com/strongloop/loopback +* Opent API + * https://github.com/OpenAPITools/openapi-generator + * https://marketplace.visualstudio.com/items?itemName=42Crunch.vscode-openapi diff --git a/src/main/md/BetterDeveloper/new_job_new_joiner_team_etiqutte.md b/src/main/md/BetterDeveloper/new_job_new_joiner_team_etiqutte.md new file mode 100644 index 00000000..cdb75d39 --- /dev/null +++ b/src/main/md/BetterDeveloper/new_job_new_joiner_team_etiqutte.md @@ -0,0 +1,67 @@ +## Guidelines +* Be patient and listen, listen and listen without any judgement. There is no better friend than active listening. +* No one likes a know-it-all - Be honest, confident and humble +* You will still do something embarrassing. And you will survive. You can't improve if you are worried about your mistakes. Don't worry about it! +* Observe what everyone is doing. Ask questions. +* Be comfortable asking others for help or referencing documentation, Try not-to-figure-out-yourself unless that is expected +* Follow Before You Lead, + * Be tolerant of practices that are foreign to your established approach. + * Mastering established team practices and rituals builds trust. +* Automating an existing process is likely to be better received than making an independent decision to replace an existing tool. +* Spend some of your personal time mastering and enhancing tools the team uses. +* Ensure your output is reviewed by existing employee before it is rolled out. + +## Don't +* Don’t try to join in on every conversation. Don’t ask people to clarify every inside joke. +* Don’t say something controversial if no one was asking for your opinion. +* Pointing out that the way you did something at your old job was better than the way your new employer does it. Don't do it! +* Giving your new co-workers unsolicited advice on 'best practices' in their areas of expertise. +* Walking in and immediately announcing the changes ("improvements") you plan to make -- before asking anybody why they do things the way they do. +* Expressing your shock or disappointment at the way your new company does things +* Telling everybody who will listen about your fabulous credentials. +* Bossing your co-workers around (when you're not the boss -- or even if you are!). +* Making a remark -- even a joke -- that suggests your new company should be grateful you showed up to save them from their unsophisticated ways. +* Approaching your new assignment with the cookie-cutter mindset "This company is no different from the other companies I've worked for." (That's not true!) +* Joking along the lines "If I'd known in advance that your company was stuck in the Dark Ages, I would never have taken the job!" 
+* Expressing frustration with your co-workers for their failure to meet your high standards for anything -- punctuality, subject-matter expertise or professionalism, for instance. + +## Sample - 30/60/90 Plan +* 30 day plan for me usually revolves around understanding existing solutions and complete mandatory trainings + * Meet manager and establish expectations for success + * Read employee handbook and all training materials + * Introduction to team members and cross-functional stakeholders + * Ask management for mentor suggestions + * Begin practicing in production support, find RCA and find most re-occuring incidences + * Know one/two system components and its code base + * Should have written at-least one test cases +* 60 day plan - Review the solution that is being rolled out, comment, get the feedback, understand the architecture + * Review for 60 days with management, seek areas of improvement + * By reviewing others solution, you would establish network + * Contribute, and understand the existing solution and release process + * Understand how it is being validated, and Knowing users of the application + * Setup bi-weekly call with mentor/manager + * Continue to build relationships internally with key stakeholders +* 90 day plan - master one of the core-component + * Should have covered component with 60% of test-cases. + * Should own all the details of the component, and ensure all the code related to it would be reviewed + * Prepare next 90 days plan + * Plan for team strategy, vision and mission + * Target for next certification + +## Three rules from Group (Overall good) +* Let your manager know that you are aggressive and would like to take up more work. Or own something. This always worked for me and gave me an opportunity to work with interesting stuff. +* Let your colleagues and your manager know the truth about how much you "don't" know about the tools and tech they use. (if you know everything already, you should find a new job). Tell them that you have "some" exposure in the things that you know. Be kind and humble but work real hard. I usually aim for twice as hard most days (in terms of hours). But DON'T COMPETE. Contribute to their work. Shadow them. Give them credit publicly, privately and to your bosses. You'll see that your favour is returned very soon. +* Don't let your insecurities drive you. Remember, you needn't prove anything. You've been recognized as someone who can add value to the organization. That's the reason you have the offer letter. +* Smile a lot + +### Five cautious group from Group (Think about it, first two make sense, others are debatable) +* The most important rule is to understand your own goal at the organization and then plan a path. Whatever you do should be aligned to that goal. Its not an army where everyone has the same goal. Try to be practical. +* Help as many people as you can. Have people on your side. Share as much knowledge as you can but dont brag. Dont make people treat you special. +* Too honest will get you screwed up so fast. Learn to know the audience and say only what matters. Realize that everyone has a different agenda and what you say will be used against you at some point. Tread carefully. +* Be a roman when you are in Rome. Dont be an outlier. Management hates outliers. +* Dont challenge your boss even in private. Remember your boss will remain as a boss forever. Its rare that someone overtakes the boss. He grows you grow. Help your boss grow or atleast act as if you care. 
+ +## References +* [Nerdish Chatter Friends](https://web.whatsapp.com/) +* [How to behave when you join a new](https://lifehacker.com/how-to-behave-when-you-join-a-new-slack-1823462314) +* [The Ten Deadliest Mistakes New Employees Make](https://www.forbes.com/sites/lizryan/2016/11/03/the-ten-deadliest-mistakes-new-employees-make/#5a26de162448) diff --git a/src/main/md/programmer@70.md b/src/main/md/BetterDeveloper/programmer@70.md similarity index 67% rename from src/main/md/programmer@70.md rename to src/main/md/BetterDeveloper/programmer@70.md index 5d3252a3..d8df2095 100644 --- a/src/main/md/programmer@70.md +++ b/src/main/md/BetterDeveloper/programmer@70.md @@ -3,6 +3,11 @@ # Bias against old programmers * The major problem for older programmers is that people believe that the ability to learn decreases with age * Older programmers have higher salary requirements. My company interviewed (and eventually hired) a 60-year-old iOS developer, and he asked us for double what the developers in their 30s were asking for. +* Startups require developers to pick up technologies you're not comfortable with, and expect to put 60+ hours, whereas older ones may need to spend evenings with their family. +* Think slowly, and doen't have the ability to push stuff out the door quickly +* Some of the old people are with very strong opinions and aren't pragmatists. +* If they started their family in 50s, Having a toddlers or young kids at that age will make even more complicated +* age != experience != skill # Fundamental skills * What good developers requires are trust and good communication. @@ -14,13 +19,14 @@ * In software - It's highly desirable for an engineer to be adaptable - Changing companies is a good way to expose yourself to different environments/perspectives (and therefore become adaptable). It's hard to get the same range of experiences from a single company. * Stay relatively current but continually grow your knowledge depth. * You need to be visible, to be found, to be reckoned good, and to develop some kind of reputation. -* There are programmers who are nearing 70s and still fits finein startup crowd. Some of them are most liked in the company and they also do lot of extracurricular activities. +* There are programmers who are nearing 70s and still fits fine in startup crowd. Some of them are most liked in the company and they also do lot of extracurricular activities. +* An old patient is better than a new doctor # Advantages * The latest tech is often nothing new, and is instead just a repackaging of prior concepts. Leverage that advantage. * If you combine solid programming with deep subject domain experience, you offer something that is quite rare. * Should have something special to offer. - +* If programming is your passion, it might show one-way or other, you may not need to worry that much. Programming is not what I do, but who I am. # To avoid * Some of the older engineers that ended up staying with a single employer for the longest (say 10+ years in one job) generally have the most difficulty finding new work when the time comes. @@ -28,6 +34,7 @@ * You got more than 25 years of experience, but not very specialized! you will not get a job anymore. * Don't work for the man, especially when the man is just a boy. 
-# resources +# Resources * [YCombinator] (https://news.ycombinator.com/item?id=9497721) * [YCombinator] (https://news.ycombinator.com/item?id=3437233) +* [Bean up their nose] (https://www.uie.com/brainsparks/2011/07/08/beans-and-noses/) diff --git a/src/main/md/BetterDeveloper/programming_videos_to_watch.md b/src/main/md/BetterDeveloper/programming_videos_to_watch.md new file mode 100644 index 00000000..fb9e771f --- /dev/null +++ b/src/main/md/BetterDeveloper/programming_videos_to_watch.md @@ -0,0 +1,9 @@ +* [Transforming Programming - Dave Thomas](https://www.youtube.com/watch?v=A76hM3MpEKo) +* [ITT 2016 - Kevlin Henney - Seven Ineffective Coding Habits of Many Programmers](https://www.youtube.com/watch?v=ZsHMHukIlJY) +* [GOTO 2016 - The Error of Our Ways Kevlin Henney](https://www.youtube.com/watch?v=IiGXq3yY70o) +* [GOTO 2016 - Small Is Beautiful Kevlin Henney](https://www.youtube.com/watch?v=B3b4tremI5o) +* [GOTO 2017 - Code as Risk Kevlin Henney](https://www.youtube.com/watch?v=YyhfK-aBo-Y) +* [Beauty in Code 2018 - Kevlin Henney — 1968](https://www.youtube.com/watch?v=KjgvffBlWAg) +* [Opher Vishnia: Wait, you can do that with JavaScript | JSConf Iceland 2018](https://www.youtube.com/watch?v=JCATu2WkOq8) +* [Machine Learning with Python](https://www.youtube.com/watch?v=pnSBZ6TEVjY&index=45&list=PLQVvvaa0QuDfKTOs3Keq_kaG2P55YRn5v) +* [Sentdex Q&A. To start, I answer how I learned Python initially, as well as how I continue to learn Python. Next, we get into what I do and why.](https://www.youtube.com/channel/UCfzlCWGWYyIQ0aLC5w48gBQ) \ No newline at end of file diff --git a/src/main/md/BetterDeveloper/software_system_design.md b/src/main/md/BetterDeveloper/software_system_design.md new file mode 100644 index 00000000..60d027d2 --- /dev/null +++ b/src/main/md/BetterDeveloper/software_system_design.md @@ -0,0 +1,11 @@ +* http://aosabook.org/en/index.html +* http://blog.gainlo.co/index.php/category/system-design-interview-questions/ +* https://github.com/donnemartin/system-design-primer +* https://github.com/alex/what-happens-when +* http://highscalability.com/all-time-favorites/ +* https://www.palantir.com/2011/10/how-to-ace-a-systems-design-interview/ +* http://web.archive.org/web/20160313091215/https://www.palantir.com/2009/05/bandwidth-isnt-cheap-disk-isnt-cheap-cpu-isnt-cheap/ +* https://www.reddit.com/r/cscareerquestions/comments/5u825g/resources_on_learning_system_design_and_data/ +* https://github.com/jdsutton/Technical-Interview-Megarepo/tree/master/System%20Design +* https://github.com/shashank88/system_design +* https://github.com/checkcheckzz/system-design-interview diff --git a/src/main/md/BetterDeveloper/when_to_quit_and_stay.md b/src/main/md/BetterDeveloper/when_to_quit_and_stay.md new file mode 100644 index 00000000..2e4c5d83 --- /dev/null +++ b/src/main/md/BetterDeveloper/when_to_quit_and_stay.md @@ -0,0 +1,24 @@ +* When to stay and when to quit? (Anonymous heuristics) + 1. If you like the people you work with (I.e. They're not morons) + 1. If you like the work and you feel you're making an impact and learning + 1. If your happy days is more than 50% of all your days + 1. Then if the pay works for you, then staying seems reasonable + +* Quit + * Quit your job if you're more often bored/dissatisfied with your days at work + * Quit your job if you're bored, underpaid, and abused. Don't be scared :) + +* The second employees start looking elsewhere, they're as good as gone. 
For two reasons: + * If you gave them a reason to look elsewhere, odds are you are not offering them enough (again, this includes traditional compensation + intangibles), so if they start looking around, they will likely find something better. + * If you gave them a reason to look elsewhere, there is a very decent chance you have already lost them as productive employees no matter what. + +* Your only long term responsibility is to yourself: +* Employers don't struggle with these moral issues when they decide to get rid of you. You got to do what's best for you, nobody in that company is going to shed a tear. + +# Reference +* https://www.reddit.com/r/jobs/comments/37iu1q/knowing_when_to_quit_your_job/ +* + +# corporate smell +* http://www.cracked.com/article_16676_6-emails-you-get-when-your-company-about-to-go-under.html + \ No newline at end of file diff --git a/src/main/md/BetterPerson_SelfHelp/Daily_Practice.md b/src/main/md/BetterPerson_SelfHelp/Daily_Practice.md new file mode 100644 index 00000000..9860e948 --- /dev/null +++ b/src/main/md/BetterPerson_SelfHelp/Daily_Practice.md @@ -0,0 +1,19 @@ +* The Benefits of Talking to Yourself + * Talk loudly yourself + * Talk as if yourself is a third person, and don't use " I can do", rather "Jane! You can do this" + * Feedback hypothesis - The idea is, if you hear a word, that helps you see something?" + * My bet is that self-talk works best on problems where you are trying to stay on task and there are possible distractions + * For tasks with a multistep sequence, talking to yourself out loud can help you keep out distractions and remind yourself where you are + +* Daily log about lesson learnt +* Catholicism - examination of conscience + * At the end of the day ask this question, "did I become angry, even internally" + * Use your will to sincerely regret anything that you think was wrong + * Use a bit of problem solving to see how it could have gone differently + +* Practice non-violent communication +* Pomodorro + +# Reference + +* [Train to be calmer/kinder](https://news.ycombinator.com/item?id=16193377) diff --git a/src/main/md/BetterPerson_SelfHelp/KonMari.md b/src/main/md/BetterPerson_SelfHelp/KonMari.md new file mode 100644 index 00000000..1be6be2a --- /dev/null +++ b/src/main/md/BetterPerson_SelfHelp/KonMari.md @@ -0,0 +1,59 @@ +* Decide to Tidy up + * Everyone can learn how to tidy up. 90 on your mindset + * Decluttering would make our life joy, not for others + * Believe with all your heart that you can and will tidy +* Visualize your ideal life style + * Think about ideal life, what kind of life would you like to live here? + * By thinking about your ideal life style, you will begin identifying why you want to tidy up? +* Finish discarding first + * Storage is not the answer to clutter, you must begin by discarding + * What doesn't spark joy, get rid away + * Functional, information, emotional and rarity + * Above values comes into our way before getting rid away the product + * We need courage to get rid away + * Discarding really means choosing what to keep + * Don't choose what to discard, choose what to keep + * Keep things that makes brings happiness and sparks joy +* Decide what you are going to tidy, Don't decide where you are going to tidy + * Don't tidy up by place or room, but by category + * Decide what category going to be tidy up? + * Decide to tidy books or clothes? 
(not by place) + * Most can't tidy because they have too much of things + * People don't know how much volume they own, they keep accumulating + * Their storage is scattered over + * Do you want something a year from now? Do you want something 10 year from now? +* Fold rather than hanging clothes + * GIS: Fold clothes as rectangle + * Folding your clothes is an opportunity to show them your appreciation + * Wrinkles are caused by preassure, not by number of foldings + * Draw line that rises to the right + * Japanese believe by we touching our clothes we pass energy +* Book tidying + * Book has time, certain past "best sellers" are no more required + * Keeping book that we could read "someday", should be discarded. Someday never comes +* Paper tidying + * Discard all the paper that you won't use it in future. + * We think office has more paper than home, but if we bring all the papers togethers, home also has more paper + * They are scattered through-out and difficult to locate important paper + * We can classify papers into three, "Need Attention", "save contractual" and "save (other)" +* Don't keep things for "just because of" + * Unmaintained guest matresses + * Old batteries, and appliances + * Makeup samples are good for travel + * Spare buttons- Have you used button that fall off? + * WristWatch band piece, novelty post-its, left-over medicines + * Used eraser bits, key rings that never used, rust hairpins, sticky ball point pen + * broken appliances, unidentified cords +* Tackle sentimental items last + * Who you are now is more important than memories of past. Throw away useless stuff that reminds past + * Memory that are truly precious will stay with us. +* Storage + * Store same category in the same place + * Don't stack things (stack we always use top), store them upright + * Use shoe boxes for store most of the stuffs + * Empty your bag, wallet everyday + * Designate a "home" for everything + * + +# References +* [GIS](https://images.google.com) \ No newline at end of file diff --git a/src/main/md/BetterPerson_SelfHelp/Stay_there_to_move_forward.md b/src/main/md/BetterPerson_SelfHelp/Stay_there_to_move_forward.md new file mode 100644 index 00000000..3326b85c --- /dev/null +++ b/src/main/md/BetterPerson_SelfHelp/Stay_there_to_move_forward.md @@ -0,0 +1,31 @@ +# There are few ways to move forward, + * Extreeme focus on the ultimate goal and keep moving towards it. But won't work when the goal itself not worth to die for, or goal itself not clear. + * Question everything that you do, if it is not worth few years from now, discard it now. + * Lesser the rubbish would leads to find the true of you + +# Is it spritual exercise? +* No! It is treating addication, here both the patient and doctor are the same. + +# Are there any guiding principle? +* On which you invest would grow! +* Anything optional, say no! +* Adopt the boredom, boredom is not part of life, it is life! +* Chase the pain + * Test if will-power theory is true, (Would abstaining coffee would drain my day?) + +# What is the problem? + * Extreeme distractions, where mind started wandering, and it took control of itself. There is no-navigator + +# What I discovered by reducing social-network + * My old sweet day-dreams, where I am the hero, the best cricket batsman who can hit 6-6's in the very first over of the best bowler. Where does it leads me to, I don't know, but I enjoy this. 
IGNORANCE is bliss + * Observing that still distracted, but definitely able to conquer it, witnessing the navigator, who is able to cheat the mind wandering few times + * Observing people, and they are also having short span of attention + * Observing friends who are scared about their future (few I considered as leaders) + * Observing people who lost jobs, and judging why they might have lost it + +# What is the thing to abstain or reduce from? + * Reddit + * Whatsapp + * Twitter/instagram/fb/amzn/good-reads + * Youtube + * Browser (reduce) diff --git a/src/main/md/Cassandra/Cassandra.md b/src/main/md/Cassandra/Cassandra.md new file mode 100644 index 00000000..7363a232 --- /dev/null +++ b/src/main/md/Cassandra/Cassandra.md @@ -0,0 +1,5 @@ + +# Reference +* (AWS re:Invent 2018: Amazon DynamoDB Deep Dive: Advanced Design Patterns for DynamoDB (DAT401))[https://www.youtube.com/watch?time_continue=33&v=HaEPXoXVf2k] +* [amazon-dynamodb-deep-dive-advanced-design-patterns-for-dynamodb](https://www.slideshare.net/AmazonWebServices/amazon-dynamodb-deep-dive-advanced-design-patterns-for-dynamodb-dat401-aws-reinvent-2018pdf) +* [Best Practices NOSql](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/best-practices.html) \ No newline at end of file diff --git a/src/main/md/Cassandra/Cassandra_TDG.md b/src/main/md/Cassandra/Cassandra_TDG.md new file mode 100644 index 00000000..1824ea4a --- /dev/null +++ b/src/main/md/Cassandra/Cassandra_TDG.md @@ -0,0 +1,130 @@ +## New model +* IBM DB1 - IMS (Hierarchical dbms) - Released in 1968 +* IBM DB2 - 1970 - "A Relational Model of Data for Large Shared Data Banks - Dr. Edgar F. Codd" +* Pros : It works for most of the cases + * SQL - Support + * ACID - Transaction (A Transformation of State - Jim Gray) + * Atomic (State A to State B - no in-between) + * Consistency + * Isolated - Force transactions to be serially executed. (If it doesn't require consistency and atomic, it is possible to have isolated and parallel txns) + * Durable - Never lost +* Cons : Won't work for massively web scale db + +## How RDBMS is tuned + +* Introduce Index +* Master(write), Slave (many times only used for read) + * Introduces replication and transaction issues + * Introduces consistency issues +* Add more CPU, RAM - Vertical scaling +* Partitioning/Sharding +* Disable journaling + +## Two-phase commit vs Compensation + +* Compensation + * Writing off the transaction if it fails, deciding to discard erroneous transactions and reconciling later. + * Retry failed operations later on notification. +* In a reservation system or a stock sales ticker, these are not likely to meet your requirements. +* For other kinds of applications, such as billing or ticketing applications, this can be acceptable. +* Starbucks Does Not Use Two-Phase Commit + * https://www.enterpriseintegrationpatterns.com/ramblings/18_starbucks.html + +## Sharding (Share nothing) + +* Rather keeping all customer in one table, divide up that single customer table so that each database has only some of the records, with their order preserved? Then, when clients execute queries, they put load only on the machine that has the record they’re looking for, with no load on the other machines. +* How to shard? 
+ * Name-wise sharding issues like customer names that starts with "Q,J" will have less, whereas customer name starts with J, M and S may be busy + * Shard by DOB, SSN, HASH +* Three basic strategies for determining shard structure + * Feature-based shard or functional segmentation + * Key-based sharding - one-way hash on a key data element and distribute data across machines according to the hash. + * Lookup Table + +# [NoSQL](http://nosql-database.org/) + +* Key-Value stores - Oracle Coherence, Redis, and MemcacheD, Amazon’s Dynamo DB, Riak, and Voldemort. +* Column stores - Cassandra, Hypertable, and Apache Hadoop’s HBase. +* Document stores - MongoDB and CouchDB. +* Graph databases - Blazegraph, FlockDB, Neo4J, and Polyglot +* Object databases - db4o and InterSystems Caché +* XML databases - Tamino from Software AG and eXist. + + +## Apache Cassandra + +* “Apache Cassandra is distributed, decentralized, elastically scalable, highly available, fault-tolerant, tuneably consistent, row-oriented database that bases its distribution design on Amazon’s Dynamo and its data model on Google’s Bigtable.” +* No SPOF + * Is not Master/Slave (MongoDB is master/slave) +* Tuneably consistent (not Eventual Consisten as majority believes) + +## Cassandra Features + +* CQL (moved from Thrift API) +* Secondary indexes +* Materialized views +* Lightweight transactions +* Consistency = Replication factor + consistency level (delegated to clients) + * Consistency level <= replication factor +* Cassandra is not column-oriented (it is row oriented) +* Column values are stored according to a consistent sort order, omitting columns that are not populated + +## Consistency Forms + +* Strict (or Serial) Consistency + * Works on Single CPU +* Casual Consistency (like Casuation) + * The cause of events to create some consistency in their order. + * Writes that are potentially related must be read in sequence. + * If two different, unrelated operations suddenly write to the same field, then those writes are inferred not to be causally related. +* Weak (or) Eventual Consistency + * Rather than dealing with the uncertainty of the correctness of an answer, the data is made unavailable until it is absolutely certain that it is correct + + +## Row-Oriented + +* Cassandra’s data model can be described as a partitioned row store, in which data is stored in sparse multidimensional hashtables. +* “Sparse” means that for any given row you can have one or more columns, but each row doesn’t need to have all the same columns as other rows like it (as in a relational model). +* “Partitioned” means that each row has a unique key which makes its data accessible, and the keys are used to distribute the rows across multiple data stores. + +## Always writeable + +* A design approach must decide whether to resolve these conflicts at one of two possible times: during reads or during writes. That is, a distributed database designer must choose to make the system either always readable or always writable. Dynamo and Cassandra choose to be always writable, opting to defer the complexity of reconciliation to read operations, and realize tremendous performance gains. The alternative is to reject updates amidst network and server failures. +* CAP Theorem + * Choose any two (of threee) + * Cassandra assumes that network partitioning is unavoidable, hence it lets us deal only with availability and consistency. + * CAP placement is independent of the orientation of the data storage mechanism + * CAP theorem database mapping + * AP - ? 
+ * To primarily support availability and partition tolerance, your system may return inaccurate data, but the system will always be available, even in the face of network partitioning. DNS is perhaps the most popular example of a system that is massively scalable, highly available, and partition tolerant. + * CP - ? + * To primarily support consistency and partition tolerance, you may try to advance your architecture by setting up data shards in order to scale. Your data will be consistent, but you still run the risk of some data becoming unavailable if nodes fail. + * CA - ? + * To primarily support consistency and availability means that you’re likely using two-phase commit for distributed transactions. It means that the system will block when a network partition occurs, so it may be that your system is limited to a single data center cluster in an attempt to mitigate this. If your application needs only this level of scale, this is easy to manage and allows you to rely on familiar, simple structures. + + +## Notable tools +* Sstableloader - Bulk loader +* Leveled compaction strategy - for faster reads +* Atomic batches +* Lightweight transactions were added using the Paxos consensus protocol +* User-defined functions +* Materialized views (sometimes also called global indexes) + +## Few use cases + +* Cassandra has been used to create a variety of applications, including a windowed time-series store, an inverted index for document searching, and a distributed job priority queue. + +## Updated CAP - Brewer's Theorem + +* Brewer now describes the “2 out of 3” axiom as somewhat misleading. He notes that designers only need sacrifice consistency or availability in the presence of partitions, and that advances in partition recovery techniques have made it possible for designers to achieve high levels of both consistency and availability. + + +## Quotes +* If you can’t split it, you can’t scale it. 
"Randy Shoup, Distinguished Architect, eBay" +* [“The Case for Shared Nothing” - Michael Stonebreaker](http://db.cs.berkeley.edu/papers/hpts85-nothing.pdf) + +## References +* [Cassandra Guide](https://github.com/jeffreyscarpenter/cassandra-guide) +* [Cassandra Paper](http://www.cs.cornell.edu/projects/ladis2009/papers/lakshman-ladis2009.pdf) +* CassandraSummit \ No newline at end of file diff --git a/src/main/md/ConferenceVideos/conferences.md b/src/main/md/ConferenceVideos/conferences.md new file mode 100644 index 00000000..3a81032c --- /dev/null +++ b/src/main/md/ConferenceVideos/conferences.md @@ -0,0 +1,3 @@ +* [GeeCON Conference](https://www.youtube.com/channel/UCVnJYdr91EZW8YvtMrxB1bg) +* [Oracle Developers](https://www.youtube.com/channel/UCdDhYMT2USoLdh4SZIsu_1g) +* [Oreily conferences](https://www.youtube.com/user/OreillyMedia/playlists) \ No newline at end of file diff --git a/src/main/md/DataScience/CheatSheet_For_Langauge.md b/src/main/md/DataScience/CheatSheet_For_Langauge.md new file mode 100644 index 00000000..3199a8c4 --- /dev/null +++ b/src/main/md/DataScience/CheatSheet_For_Langauge.md @@ -0,0 +1,2 @@ +* [Python Cheatsheet](https://gto76.github.io/python-cheatsheet/) +* [R for DataScience](https://github.com/hadley/r4ds) \ No newline at end of file diff --git a/src/main/md/DataScience/DataScience.md b/src/main/md/DataScience/DataScience.md new file mode 100644 index 00000000..acae975b --- /dev/null +++ b/src/main/md/DataScience/DataScience.md @@ -0,0 +1,5 @@ +* [Microsoft Professional Program Certificate in Data Science.](https://www.edx.org/microsoft-professional-program-data-science) +* [Essential Statistics for Data Analysis using Excel](https://www.edx.org/course/essential-statistics-data-analysis-excel-3) +* [Analyzing and Visualizing Data with Excel] (https://www.edx.org/course/analyzing-and-visualizing-data-with-excel-0) +* [https://mlcourse.ai/](https://mlcourse.ai/) +* [Datascience CMU](https://oli.cmu.edu/product-category/data-science-indep/) diff --git a/src/main/md/DataScience/Datascience_ML_Books.md b/src/main/md/DataScience/Datascience_ML_Books.md new file mode 100644 index 00000000..edd5e5ac --- /dev/null +++ b/src/main/md/DataScience/Datascience_ML_Books.md @@ -0,0 +1,2 @@ +* https://mml-book.github.io/ +* https://www-bcf.usc.edu/~gareth/ISL/ISLR%20First%20Printing.pdf \ No newline at end of file diff --git a/src/main/md/DataScience/InterviewQuestion_on_Statistics_For_DataScience_Programmers.md b/src/main/md/DataScience/InterviewQuestion_on_Statistics_For_DataScience_Programmers.md new file mode 100644 index 00000000..09863850 --- /dev/null +++ b/src/main/md/DataScience/InterviewQuestion_on_Statistics_For_DataScience_Programmers.md @@ -0,0 +1,23 @@ +## One who is able to answer below questions, will be considered with adequate knowledge in statistics. + +* What is the difference between statistics and probability? + * Statistics - Summarizes the data or population (might based on sample) + * Probablity - summarizes uncertinity of an event +* What is the standard deviation? +* What are all the way to describe "average" of a given population? + * Mean (Artimetic mean) + * Median (more number of ocurrence) + * Mode (middle) +* What is standard normal vairable? Given an example. +* What is z-score? +* What is the binomial distribution? +* What is the normal distribution? +* What is standard deviation rule? +* What is the regression? +* What is the meaning of -1,0,1 for regression? +* If regression is 1, does it mean we found the cause? 
+* What is lurking variable? +* What is std.deviation of sample means? + * zigma/sqrt(n) + * It is smaller than std.deviation of population +* \ No newline at end of file diff --git a/src/main/md/ErrorsEncountered.md b/src/main/md/ErrorsEncountered.md new file mode 100644 index 00000000..6cf85aac --- /dev/null +++ b/src/main/md/ErrorsEncountered.md @@ -0,0 +1,5 @@ +* Error: ER_NOT_SUPPORTED_AUTH_MODE: Client does not support authentication protocol requested by server; consider upgrading MySQL client + * It happened in mysql-8 + * For above solution is ~~> ALTER USER 'root'@'localhost' IDENTIFIED WITH mysql_native_password BY 'root' Error Code: 1290. + * But It is unable to The MySQL server is running with the --skip-grant-tables option so it cannot execute this statement 0.000 sec +* How to disable --skip-grant-tables \ No newline at end of file diff --git a/src/main/md/EventSource.md b/src/main/md/EventSource.md index e0c6c0d2..d1eaf85e 100644 --- a/src/main/md/EventSource.md +++ b/src/main/md/EventSource.md @@ -1,16 +1,31 @@ -* Event - Immutable object, input to the system -* Event (Lean) - Only required details (Moust click, Address Change, NewOrder) -* Event (Rich or Fat) - Event with past, present and additional context, so that processing system no need to query other system to know the details. -* GetExchangeRateUSD_JPY - * Is above event? - * Result would change depends on time. -* CommandSource :: State -> Command -> Event -* EventSource :: State -> Event -> Event - -* [Command Source](http://thinkbeforecoding.com/post/2013/07/28/Event-Sourcing-vs-Command-Sourcing) -* [Event Source](https://www.martinfowler.com/eaaDev/EventSourcing.html) -* [Event Source](https://goodenoughsoftware.net/tag/event-sourcing/) -* [CQRS](https://goodenoughsoftware.net/tag/cqrs/) -* [Event Source](https://ookami86.github.io/event-sourcing-in-practice/#further-reading-1.md) -* [Domain Event | Command](https://www.martinfowler.com/eaaDev/DomainEvent.html) -* [Microsoft EventSource document](https://docs.microsoft.com/en-us/azure/architecture/patterns/event-sourcing) \ No newline at end of file +* Event + * Facts, Already happened + * Immutable object, input to the system + * New events can invalidate old events + * Cannot be deleted, retracted + * Even name should be in past tense - *NameChanged, OrderPlaced, UserDeleted, SettlementInitiated, TradeSettled, GoodsDelivered* +* Event + * (Lean) - Only required details (Mouse click, Address Change, NewOrder) + * (Rich or Fat) - Event with past, present and additional context, so that processing system no need to query other system to know the details. +* Reactor + * Receive and React (or not) to events (facts) +* Event Sourcing architecture should rely on eventual consistency + * There is no current accurate instance of state +* GetExchangeRateUSD_JPY + * Is above event? + * Result would change depends on time. +* CommandSource :: State -> Command -> Event + * Object form of method/Action request + * Represents side effects + * CreateOrder, UpdateInventory, ShiptProduct + * "Tell Don't Ask" - Command name should not ask, it should tell. 
"ShipIt, TransferIt, DeliverIt, CloseOrder, DelteUser" +* EventSource :: State -> Event -> Event +* [Command Source](http://thinkbeforecoding.com/post/2013/07/28/Event-Sourcing-vs-Command-Sourcing) +* [Event Source](https://www.martinfowler.com/eaaDev/EventSourcing.html) +* [Event Source](https://goodenoughsoftware.net/tag/event-sourcing/) +* [CQRS](https://goodenoughsoftware.net/tag/cqrs/) +* [Event Source](https://ookami86.github.io/event-sourcing-in-practice/#further-reading-1.md) +* [Domain Event | Command](https://www.martinfowler.com/eaaDev/DomainEvent.html) +* [Microsoft EventSource document](https://docs.microsoft.com/en-us/azure/architecture/patterns/event-sourcing) +* [How Events Are Reshaping Modern Systems](https://www.infoq.com/presentations/systems-event-driven) +* [TellDontAsk](https://martinfowler.com/bliki/TellDontAsk.html) diff --git a/src/main/md/FlashCards/Java9_to_11FlashCards.md b/src/main/md/FlashCards/Java9_to_11FlashCards.md new file mode 100644 index 00000000..c061fb4d --- /dev/null +++ b/src/main/md/FlashCards/Java9_to_11FlashCards.md @@ -0,0 +1,59 @@ +## Give an example for local variable type inference +```java +var list = new ArrayList(); // infers ArrayList +var stream = list.stream(); // infers Stream +``` +---- + +## Additional local-variable syntax for lambda parameters +```java +list.stream() + .map((@Notnull var s) -> s.toLowerCase()) + .collect(Collectors.toList()); +``` +---- +## What are the changes to HTTP Client +```java +// Additional package java.net.http +java.net.http.HttpClient +java.net.http.HttpRequest +java.net.http.HttpResponse +java.net.http.WebSocket +``` +---- + +## How to negate predicate in Java-11 +``` +lines.stream() + .filter(Predicate.not(String::isBlank)) +``` + +---- + +## What are sequence of https protocol? +* Browser requests a secure page with https url +* WebServer sends it's public-key with server certificate +* Browser ensures the certifcate is valid + * Certificate is not expired + * Not revoked + * Issued by a Trusted 3rd party +* Browser creates a Symmetric Key and sends it to the Server (after encrypting using webserver's public-key) +* WebServer decrypts the symmetric key using its private key +* WebServer sends page encrypted with the Symmetric key (of the client) +* Browser decrypts the page using symmetric key and displays the content + +---- + +## How to configure passwordless ssh on the server? + + +1. Create keys on the client machine + * ssh-keygen -t rsa + * ~/.ssh/id_rsa and ~/.ssh/id_rsa.pub +2. copy id_rsa.pub on remote host +3. Edit your sshd_config file on the remote server + * Append the contents of id_rsa.pub to ServerHome/.ssh/authorized_keys + * cat id_rsa_of_client.pub >> ServerHome/.ssh/authorized_keys + * File permission should be 0600 ServerHome/.ssh/authorized_keys + +---- \ No newline at end of file diff --git a/src/main/md/Food.md b/src/main/md/Food.md new file mode 100644 index 00000000..1758f6b0 --- /dev/null +++ b/src/main/md/Food.md @@ -0,0 +1,21 @@ +Definition: The pH scale ranges from 1 to 14. Acidic foods, such as lemons, have a lower number while alkaline foods, such as bananas, have a number closer to 14. + +* Base food + * Bananas + * Watermelon (alkaline) + * celery and kale (more alkaline) + * Most fruit juices are base. + + +* Acidic + * All grains + * Tomatoes + * Whole-meal and refined bread and pasta, white and brown rice, amaranth, millet, oats, buckwheat, spelt and rye. + * Salmon to steak, all animal flesh has an acidic effect on the body. 
+ * Some of the most acidic foods in this category are shellfish, sardines, prawns, trout, goose, liver, rabbit and processed meats such as canned corned beef, lunch meat and salami + * Any milk, + * cottage cheese, Camembert, Edam, Parmesan and processed cheese. + * Eggs are acidic, but the yolks are far more acidic than the whites. + +Reference +* https://trans4mind.com/nutrition/AirWaterLife-FoodImpactOnBody-pH-Chart.png diff --git a/src/main/md/FunctionalProgramming/FreeMonads.md b/src/main/md/FunctionalProgramming/FreeMonads.md new file mode 100644 index 00000000..2d90fbba --- /dev/null +++ b/src/main/md/FunctionalProgramming/FreeMonads.md @@ -0,0 +1,69 @@ +## [YOW! Lambda Jam 2014 - Run free with the monads: Free Monads for fun and profit - Ken Scrambler] (https://www.youtube.com/watch?v=fU8eKoakd6o) + +## Why free monad +* Sepeartion of concerns +* Impreative and side-effect code +* Convert decisions as data + + +def regularAI() = { + val enemy = findNearesTank() + val angle = angleTo(enemy) + aimToward(angle) + fire() +} + +## Problems +* How to compose side-effects +* Linking effectful inputs (sequencing) +* Gradual execution + * Internally may need to manage state + * State transition should be properly managed + * May force to adopt custom scripting langauge + * Ugly & hard, won't compose +* We need very close to imperative style, but should work purely functional way +* Monads to the rescue (for all the above problems) + +## What is "Free" data structure +* Free is a data structure +* Represents tree of computatons +* Free[F[_], A] + * Suspend(F[Free[F, A]]) + * Return(A) +* We can assume Suspend and Return like + * When we map Suspend, it produces Free[F[_]] such as ```Suspend(F[Free[F, A]]).map(fn) =>Free[F[_], A]``` + * Free[F[_], A] may turn to one of following type + * Suspend(F[Free[F, A]]) or Free[F[_],A] +* liftF : F[A] => Free[F,A] + +## Why free Monad +* Sequential computations +* Elegant imperative-style syntax +* Lazy evaluation + +## What do we need? +* Design our DSL ADT (Set of API specific to problem domain can be considered like DSL) +* If we convert DSL ADT data-structure to a functor, then we get a Free Monad +* If your functor also happens to be a Monad, it is not that thing, it is a new thing + + + + +Steps +* 1 - Design DSL, Create ADT to represent DSL (sealed trait and case class and objects) +* 2 - Create a type hole in the above DSL (to map over to other types) +* 3 - Continuation of statements - 3-a) void/unit is replaced with next continuation +* 3 - Continuation of expressions - 3-b) return value also replaced as function (rather returning, accept successive function that requires result of current computation) (hollywood principle) + * instead of ```def findNearestTank(): Tank```, write ```case class FindNearestTank(fn: Tank => Next) extends SuperTrait``` + * instead of ```def angleTo(pos: Postion): Angle```, write ```case class AngleTo(pos: Position, fn: Angle => Next) extends SuperTrait``` +* 4 Make above DSL a functor, implement fmap method for the super trait + * ```def map[B](f: Next => N): AIMOVE[B] = this match { case Fire(next) => {}. 
case AngleTo(x, y) => {} ...}``` + * above map method should cover entire ADT tree, For example if this is Option, map should cover both None, Some +* 5 - 5a) Lifting Statements ``def aimToward(a: Angle) : AI[Unit] = Free.liftF(AimToward(a, ())) +* 5 - 5b) Lifting expressions ``def aimToward(a: Angle) : AI[Unit] = Free.liftF(AimToward(a, ())) +* Write interpreter that can covert F[A] into G[A] + +## References +[Free Monad Example] - (scalaz_examples\src\main\scala\scalaz\example\FreeUsage.scala) +[Slides] - (https://www.slideshare.net/kenbot/running-free-with-the-monads) +[Code] - (https://github.com/kenbot/free) diff --git a/src/main/md/FunctionalProgramming/Idris.md b/src/main/md/FunctionalProgramming/Idris.md new file mode 100644 index 00000000..15e462a1 --- /dev/null +++ b/src/main/md/FunctionalProgramming/Idris.md @@ -0,0 +1,47 @@ +* IDRIS - Dependently Typed Functional Programming Language + * Software bugs would become pretty costly in future, In Typed programs many errors are found during compile time. With dependent types compilers will do more work than simple typed programs + * Supports tactic based theorem proving + * Types may be predicated on values +* Total Function + * A total function is guaranteed to produce a result + * Total function will return a value in a finite time for every possible well-typed input + * Total function guaranteed not to throw any exceptions +* Type driven development + * Types are first class language construct, function can accept, manipulate and return types + * Types can be manipulated, used, passed as arguments to functions, and returned from functions just like any other value, such as numbers, strings, or list + * It allows relationships to be expressed between values; for example, that two lists have the same length. + * It allows assumptions to be made explicit and checkable by the compiler. + * For example, if you assume that a list is non-empty, Idris can ensure this assumption always holds before the program is run. + * If desired, it allows program behavior to be formally stated and proven correct. + + + +# Chapter-1 Summary # +* Types are a means of classifying values. Programming languages use types to decide how to lay out data in memory, and to ensure that data is interpreted consistently. +* A type can be viewed as a specification, so that a language implementation (specifically, its type checker) can check whether a program conforms to that specification. +* Type-driven development is an iterative process of type, define, refine, creating a type to model a system, then defining functions, and finally refining the types as necessary. +* In type-driven development, a type is viewed more like a plan, helping an interactive environment guide the programmer to a working program. +* Dependent types allow you to give more-precise types to programs, and hence more informative plans to the machine. +* In a functional programming language, program execution consists of evaluating functions. +* In a purely functional programming language, additionally, functions have no side effects. +* Instead of writing programs that perform side effects, you can write programs that describe side effects, with the side effects stated explicitly in a program’s type. +* A total function is guaranteed to produce a result for any well-typed input in finite time. +* Idris is a programming language that’s specifically designed to support type-driven development. It’s a purely functional programming language with first-class dependent types. 
+* Idris allows programs to contain holes that stand for incomplete programs. +* In Idris, types are first-class, meaning that they can be stored in variables, passed to functions, or returned from functions like any other value. + + +# Chapter-2 Summary # +* The Prelude defines a number of basic types and functions and is imported automatically by all Idris programs. +* Idris provides basic numeric types, Int, Integer, Nat, and Double, as well as a Boolean type, Bool, a character type, Char, and a string type, String. +* Values can be converted between compatible types using the cast function, and can be given explicit types with the the function. +* Tuples are fixed-size collections where each element can be a different type. +* Lists are variable size collections where each element has the same type. +* Function types have one or more input types and one output type. +* Function types can be generic, meaning that they can contain variables. These variables can be constrained to allow a smaller set of types. +* Higher-order functions are functions in which one of the arguments is itself a function. +* Functions consist of a required type declaration and a definition. Function definitions are equations defining rewrite rules to be used during evaluation. +* Whitespace is significant in Idris programs. Each definition in a block must begin in exactly the same column. +* Function documentation can be accessed at the REPL with the :doc command. +* Idris programs can be divided into separate source files called modules. +* The entry point to an Idris program is the main function, which must have type IO (), and be defined in the module Main. Simple interactive programs can be written by applying the repl function from main. \ No newline at end of file diff --git a/src/main/md/FunctionalProgramming/Typeclasses.md b/src/main/md/FunctionalProgramming/Typeclasses.md new file mode 100644 index 00000000..c60befa1 --- /dev/null +++ b/src/main/md/FunctionalProgramming/Typeclasses.md @@ -0,0 +1,217 @@ +## Foldable +* Definition: Foldable abstracts over containers which can be “folded” into a summary value. To make a Foldable instance you only need to implement one method: your choice of foldMap or foldr. + +```Haskell +class Foldable t where + foldr :: (a -> b -> b) -> b -> t a -> b + fold :: Monoid m => t m -> m + foldMap :: Monoid m => (a -> m) -> t a -> m +``` + +```Scala +trait Foldable[F[_]] { + + def foldMap[A,B](fa: F[A])(f: A => B): B + + /** Map each element of the structure to a [[scalaz.Monoid]], and combine the results. */ + def foldMap[A,B](fa: F[A])(f: A => B)(implicit F: Monoid[B]): B +} +``` + +* Instances: Map, Set, Tree, and Sequence + +## Equal +* equal has the same implementation requirements as Object.equals +* commutative f1 === f2 implies f2 === f1 +* reflexive f === f +* transitive f1 === f2 && f2 === f3 implies f1 === f3 + +## Order +* Can be thought like java.lang.Comparable +* def order(x: F, y: F): Ordering +* @op("<" ) def lt(x: F, y: F): Boolean = ... +* @op("<=") def lte(x: F, y: F): Boolean = ... +* @op(">" ) def gt(x: F, y: F): Boolean = ... +* @op(">=") def gte(x: F, y: F): Boolean = ... 
+```Scala + sealed abstract class Ordering + object Ordering { + case object LT extends Ordering + case object EQ extends Ordering + case object GT extends Ordering + } +``` + +## Show +* Can be thought like toString of Object method +* Scala’s default implicit conversions in Predef, and language level support for toString in interpolated strings, it is hard to remember to use shows instead of toString +```Scala + trait Show[F] { + def show(f: F): Cord = ... + def shows(f: F): String = ... + } +``` + +## Functor + +* Definition: A Functor represents a “container” of some sort, along with the ability to apply a function uniformly to every element in the container. It can also be considered like boxed type (Java boxed type lik Integer). Important API signature is ```fmap :: (a -> b) -> f a -> f b``` + +Functor can also be considered as "Function Lifter", such that functor takes `a -> b` function and returns a function `f a -> f b`. This is called lifting a function. + +* Known instances List, Option, Either, Function1 + +```Haskell +class Functor f where + fmap :: (a -> b) -> f a -> f b + (<$) :: a -> f b -> f a + (<$) = fmap . const +``` +```Scala + @typeclass trait Functor[F[_]] { + def map[A, B](fa: F[A])(f: A => B): F[B] + + def void[A](fa: F[A]): F[Unit] = map(fa)(_ => ()) + def fproduct[A, B](fa: F[A])(f: A => B): F[(A, B)] = map(fa)(a => (a, f(a))) + + def fpair[A](fa: F[A]): F[(A, A)] = map(fa)(a => (a, a)) + def strengthL[A, B](a: A, f: F[B]): F[(A, B)] = map(f)(b => (a, b)) + def strengthR[A, B](f: F[A], b: B): F[(A, B)] = map(f)(a => (a, b)) + + def lift[A, B](f: A => B): F[A] => F[B] = map(_)(f) + def mapply[A, B](a: A)(f: F[A => B]): F[B] = map(f)((ff: A => B) => ff(a)) + } +``` + + +## Functor laws +```Haskell +fmap id = id -- fmap preserves identity +fmap (g . h) = (fmap g) . (fmap h) -- fmap distributes over composition +``` + +* Any Functor instance satisfying the first law (fmap id = id) will automatically satisfy the second law as well. + + +## Applicative + + * Definition: Functor doesn’t allow to apply a function which is itself in a context to a value in a context. Applicative gives us function "apply" (<*>, "app", or "splat") to do that. It encapsulates certain sorts of “effectful” computations in a functionally pure way, and encourages an “applicative” programming style. It also provides a method, pure, for embedding values in a default, “effect free” context. + +```Haskell +class Functor f => Applicative f where + pure :: a -> f a + (<*>) :: f (a -> b) -> f a -> f b +``` + + +## Applicative laws +```Haskell +pure id <*> v = v -- The identity law: +pure f <*> pure x = pure (f x) -- Homomorphism +u <*> pure y = pure ($ y) <*> u -- Interchange +u <*> (v <*> w) = pure (.) <*> u <*> v <*> w -- Composition +fmap g x = pure g <*> x --Not law, relationship with functor and applicative +``` + +* Uses - Applicative functors allow you to take a "normal" function (taking non-functorial arguments) use it to operate on several values that are in functor contexts. As a corollary, this gives you effectful programming without monads. Applicative functors are useful when you need sequencing of actions, but don't need to name any intermediate results + +* A common example is parsing, where you need to run a number of actions that read parts of a data structure in order, then glue all the results together. This is like a general form of function composition: where you can think of a, b and so on as the arbitrary actions to run, and f as the functor to apply to the result. 
+ +```f a b c d``` => ```f <$> a <*> b <*> c <*> d``` + +* The only thing a functor can do is to alter the end result of a computation via some pure function a -> b, it could have effects but can't sequence effects. Whereas application function (*>) :: f a -> f b -> f b, chains two computations and discarding the end result from the first one. it's the ability to chain computations which is the minimum requirement for effects such as mutable state in computations. + +## Traversable + +* Definition: Traversable is a Foldable Functor, traverse function is unique for Traversable. Traversable as a generalization of Functor with traverse as "effectful fmap". The map (traverse) applications in the Traversable instance take place within an Applicative context. Doing two traversals in sequence can be collapsed to a single traversal. + +```Haskell +class (Functor t, Foldable t) => Traversable t where + traverse :: Applicative f => (a -> f b) -> t a -> f (t b) + sequenceA :: Applicative f => t (f a) -> f (t a) +``` + +```Scala @ Future.scala +def traverse[A, B](in: List[A])(fn: (A) ⇒ Future[B]): Future[List[B]] +``` + + +* Traversable uses, turn a tree of lists into a list of trees. Travers functor and covert into another functor. + +Notably, Set is not Traversable +* Known instances of traversable are List, ZipList, Maybe, ((,) e), Sum, Product, Either e, Map, Tree, and Sequence. Notably, Set is not Traversable, although it is Foldable. + + +## Traversable laws +```Haskell + traverse Identity = Identity + traverse (Compose . fmap g . f) = Compose . fmap (traverse g) . traverse f --Doing two traversals in sequence can be collapsed to a single traversal +``` + +## Category + +* Definition: Category := "identity + function composition", Category generalizes the notion of function composition to general “morphisms”. + +```Haskell +-- Category with the infix function type constructor (->) +class Category arr where + id :: a `arr` a + (.) :: (b `arr` c) -> (a `arr` b) -> (a `arr` c) + +-- Category with a normal (prefix) type constructor +class Category cat where + id :: cat a a + (.) :: cat b c -> cat a b -> cat a c +``` + +* Category instances are of type * -> * -> *, i.e, which takes two type arguments. + +## Monad + +* Definition: A Monad is just a monoid in the category of endofunctors. + +```Haskell + class Monad m where + return :: a -> m a + (>>=) :: m a -> (a -> m b) -> m b +``` + + +## Arrow + +* Definition: Arrow class represents another abstraction of computation, In Monad and Applicative, types only reflect on their output, the type of an Arrow computation reflects both its input and output. Arrows generalize functions: if arr is an instance of Arrow, a value of type "b `arr` c" can be thought of as a computation which takes values of type b as input, and produces values of type c as output. + +```Haskell +class Category arr => Arrow arr where + arr :: (b -> c) -> (b `arr` c) + first :: (b `arr` c) -> ((b, d) `arr` (c, d)) +``` + +* Idempotent: Applying an action multiple times has the same result as applying the same action one + * f(f(x)) = f(x) + * "mkdir -p /tmp/dummy" + * In matrices and determinant, A * A = A, then A is idempotent +* Covariant: If Cat extends Animal, if list constructor is covariant then List[Cat] extends List[Animal] +* Contravariant: If Cat extends Animal, if list constructor is contraVariant then List[Animal] extends List[Cat] (reversal of covariant) +* Conjunction: The condition of being joined; Compound statement that uses the word AND. 
+* Disjunction: The condition of being disjoined; separation, disunion 'either/or'. +* Homomorphism: The most important functions between two groups are those that “preserve” the group operations, and they are called homomorphisms. (Homo=same) + A function f : G → H between two groups is a homomorphism when f(x*y) = f(x)*f(y) for all x and y in G. + * Here the multiplication in x*y is in G and the multiplication in f(x)*f(y) is in H, so a homomorphism from G to H is a function that transforms the operation in G to the operation in H. + * Homemorphic examples : Examples: c(x + y) = cx + cy, |xy| = |x||y|, (xy)^2 = x^2*y^2 +* Isomorphisms: Isomorphisms are formalized using category theory. A morphism f : X → Y in a category is an isomorphism if it admits a two-sided inverse, meaning that there is another morphism g : Y → X in that category such that gf = 1X and fg = 1Y, where 1X and 1Y are the identity morphisms of X and Y, respectively. + * An isomorphism is a pair of morphisms (i.e. functions), f and g, such that: f . g = id and g . f = id +* Adjoint functors: Adjunction is a possible relationship between two functors. A pair of adjoint functors from C to D and from D to C is what is needed to make two categories C and D compatible in their objects and morphisms. + * An adjunction between categories C and D is a pair of functors, F: D => C, G: C => D + * The functor F is called a left adjoint functor, while G is called a right adjoint functor. “F is left adjoint to G” (or equivalently, “G is right adjoint to F”) + * Endofunctors : An endofunctor is a functor from one category back to the same category. It maps objects of the category to objects of the same category. The simplest example is the identity functor which maps every object inside a category back to itself; more interesting examples map objects to other objects in the same category. + + +* Typeclass should be coherent, don’t break typeclass coherence. + * There should one Monoid[Option[Boolean]] in the program, it should not change based on scope or different imports + +### Reference +* [Second functor law is redundant](https://github.com/quchen/articles/blob/master/second_functor_law.md) +* [McBride and Paterson. 
The title of their classic paper, Applicative Programming with Effects](http://www.soi.city.ac.uk/~ross/papers/Applicative.html) +* [The Essence of the Iterator Pattern](http://www.comlab.ox.ac.uk/jeremy.gibbons/publications/iterator.pdf) +* [A taste of category theory for computer scientists](http://repository.cmu.edu/cgi/viewcontent.cgi?article=2846&context=compsci) + diff --git a/src/main/md/FunctionalProgramming/Typescript.md b/src/main/md/FunctionalProgramming/Typescript.md new file mode 100644 index 00000000..d5b50cd7 --- /dev/null +++ b/src/main/md/FunctionalProgramming/Typescript.md @@ -0,0 +1,58 @@ +```Typescript +=> is called fat arrow or lambda operator + +var add: (x: number, y: number) => number = function(x: number, y: number) : number { + return x+y +} + +var $ = (id) => document.getElementBy(id); + +above is equivalent to + +var $ = function(id) { return document.getElementBy(id); }; + +//id is called inline parameter (since it doesn't have type parameter) +// => fat arrow is separator between parameter and function body + + +class Calc { + add(x: number, y: number): number { + return x+y; + } +} + +--- + +function add(msg: string, x: number, y: number) { + console.log(msg + (x+y)); +} + +add('Total =',3,4); + +function buildName(firstName: string, lastName = "Smith") { + return firstName + " " + lastName; +} + +var result1 = buildName("Bob"); //works correctly now, also +var result2 = buildName("Bob", "Adams", "Sr."); //error, too many parameters +var result3 = buildName("Bob", "Adams"); //ah, just right + +--- +Optional parameters and default parameters also share what the type looks like. Both: + +function buildName(firstName: string, lastName?: string) { +and +function buildName(firstName: string, lastName = "Smith") { + +share the same type "(firstName: string, lastName?: string)=>string". +The default value of the default parameter disappears, leaving only the knowledge that the parameter is optional. + +--- + +function buildName(firstName: string, ...restOfName: string[]) { + return firstName + " " + restOfName.join(" "); +} + +var employeeName = buildName("Joseph", "Samuel", "Lucas", "MacKinzie"); + +``` diff --git a/src/main/md/functor_applicative_monad.md b/src/main/md/FunctionalProgramming/functor_applicative_monad.md similarity index 58% rename from src/main/md/functor_applicative_monad.md rename to src/main/md/FunctionalProgramming/functor_applicative_monad.md index 23d4e1ef..ddd1e082 100644 --- a/src/main/md/functor_applicative_monad.md +++ b/src/main/md/FunctionalProgramming/functor_applicative_monad.md @@ -1,27 +1,61 @@ -# Functor, Mondad and applicative -* Functor      |  fmap :: Functor f => (a -> b) -> f a -> f b -* Applicative |  (<*>) :: Applicative f => f (a -> b) -> f a -> f b -* Monad m   |  (>>=) :: Monad m => m a -> (a -> m b) -> m b -* Monoid m  |  (mappend) :: a -> a -> a ; mempty :: a -* *lower case variables represents generic type, not variable* - -# Monoid laws -* (x <> y) <> z = x <> (y <> z) -- associativity -* mempty <> x = x -- left identity -* x <> mempty = x -- right identity - - -# Applicative laws -* pure id <*> v = v -- Identity -* pure f <*> pure x = pure (f x) -- Homomorphism -* u <\*> pure y = pure ($ y) <\*> u -- Interchange -* pure (.) <\*> u <\*> v <\*> w = u <\*> (v <\*> w) -- Composition -* * *pure as a way to inject values into the functor in a default,* -* * *The interchange law says that applying a morphism to a "pure" value pure y is the same as applying pure ($ y) to the morphism. 
-* * *($ y) is the function that supplies y as argument to another function.* -* * *The composition law says that pure (.) composes morphisms similarly to how (.) composes functions: applying the composed morphism pure (.) <*> u <*> v to w gives the same result as applying u to the result of applying v to w* - -# Monad laws -* m >>= return = m -- right unit -* return x >>= f = f x -- left unit -* (m >>= f) >>= g = m >>= (\x -> f x >>= g) -- associativity +# Functor, Mondad and applicative +* Functor      |  fmap :: Functor f => (a -> b) -> f a -> f b +* Applicative |  (<*>) :: Applicative f => f (a -> b) -> f a -> f b +* Monad m   |  (>>=) :: Monad m => m a -> (a -> m b) -> m b +* Monoid m  |  (mappend) :: a -> a -> a ; mempty :: a +* *lower case variables represents generic type, not variable* + +# Monoid laws +* (x <> y) <> z = x <> (y <> z) -- associativity +* mempty <> x = x -- left identity +* x <> mempty = x -- right identity + + +# Applicative laws +* pure id <*> v = v -- Identity +* pure f <*> pure x = pure (f x) -- Homomorphism +* u <\*> pure y = pure ($ y) <\*> u -- Interchange +* pure (.) <\*> u <\*> v <\*> w = u <\*> (v <\*> w) -- Composition +* * *pure as a way to inject values into the functor in a default,* +* * *The interchange law says that applying a morphism to a "pure" value pure y is the same as applying pure ($ y) to the morphism. +* * *($ y) is the function that supplies y as argument to another function.* +* * *The composition law says that pure (.) composes morphisms similarly to how (.) composes functions: applying the composed morphism pure (.) <*> u <*> v to w gives the same result as applying u to the result of applying v to w* + +# Monad laws +* m >>= return = m -- right unit +* return x >>= f = f x -- left unit +* (m >>= f) >>= g = m >>= (\x -> f x >>= g) -- associativity + + +# Applicative usage + +```scala +object Applicative extends App { + case class User(userFirstName: String, userLastName: String, userEmail: String) + + val userFactory: (String, String, String) => User = (a: String, b: String, c: String) => User(a, b, c) + + val dynamicUserFactory: (Map[String, String]) => Option[User] = { userprops => + for { + userFirstName <- userprops.get("userFirstName") + userLastName <- userprops.get("userLastName") + userEmail <- userprops.get("userEmail") + } yield userFactory(userFirstName, userLastName, userEmail) + } + + /** + * Now using applicative, we are able to acheive without monad + */ + val applicativeUserFactory: (Map[String, String]) => Option[User] = { props => + //Here Apply method takes Type parameter, it should matching with Functor type of the Applicative arguments + val applicativeFactory = Apply[Option].lift3(userFactory) + applicativeFactory(props.get("userFirstName"), props.get("userFirstName"), props.get("userFirstName")) + + } + +} +``` + + +# References +* [Application Practical](https://pbrisbin.com/posts/applicative_functors/) \ No newline at end of file diff --git a/src/main/md/FunctionalProgramming/typeclass_api.md b/src/main/md/FunctionalProgramming/typeclass_api.md new file mode 100644 index 00000000..03b784ab --- /dev/null +++ b/src/main/md/FunctionalProgramming/typeclass_api.md @@ -0,0 +1,37 @@ +| Typeclass | Method | From | Given | To | +| ------------- |:-------------:|:-------------:| -----:|:-------------:| +|InvariantFunctor | xmap | F[A] | A => B, B => A | F[B] | +|Contravariant |contramap | F[A] | B => A | F[B] | +|Functor | map | F[A] | A => B | F[B] | +|Apply | ap / <*> | F[A] | F[A => B] | F[B] | +| | apply2 | F[A], 
F[B] | (A, B) => C | F[C] | +|Divide | divide2 | F[A], F[B] | C => (A, B) | F[C] | +|Bind | bind / >>= | F[A] | A => F[B] | F[B] | + | | join | F[F[A]] | | F[A] | +|Cobind | cobind | F[A] | F[A] => B | F[B] | +| | cojoin | F[A] | | F[F[A]] | +|Applicative | point | A | | F[A] | +|Comonad | copoint | F[A] | | A | +|Semigroup | append | A, A | | A | +|Plus | plus / <+> | F[A], F[A] | | F[A] | +|MonadPlus | withFilter | F[A] | A => Boolean | F[A] | +|Align | align | F[A], F[B] | | F[A \&/ B] | +| | merge | F[A], F[A] | | F[A] | +|Zip | zip | F[A], F[B] | | F[(A, B)] | +|Unzip | unzip | F(A, B) | | (F[A], F[B]) | +|Cozip | cozip | F[A \/ B] | | F[A] \/ F[B] | +|Foldable | foldMap | F[A] | A => B | B | + | | foldMapM | F[A] | A => G[B] | G[B] | +|Traverse | traverse F[A] A => G[B] G[F[B]] | +| | sequence | F[G[A]] | G[F[A]] | +|Equal | equal / | === A, A | | Boolean | +|Show | shows | A | String | +|Bifunctor | bimap | F[A, B] | A => C, B => D | F[C, D] | +| | leftMap | F[A, B] | A => C | F[C, B] | +| | rightMap | F[A, B] | B => C | F[A, C] | +|Bifoldable | bifoldMap | F[A, B] | A => C, B => C | C | +|(with MonadPlus) | separate | F[G[A, B]] | | (F[A], F[B]) | +|Bitraverse | bitraverse | F[A, B] | A => G[C], B => G[D] | G[F[C, D]] | +| | bisequence | F[G[A], G[B]] | | G[F[A, B]] | + +* [scalaz typeclass](http://arosien.github.io/scalaz-cheatsheets/typeclasses.pdf) \ No newline at end of file diff --git a/src/main/md/Health/exercise.md b/src/main/md/Health/exercise.md new file mode 100644 index 00000000..167d3dcb --- /dev/null +++ b/src/main/md/Health/exercise.md @@ -0,0 +1,19 @@ +* [8 Best Dumbbell Exercises Ever HIT EVERY MUSCLE!](https://www.youtube.com/watch?v=y1r9toPQNkM) + * Dumbbell thursters + * Crush Grib Goblet Squat + +# Meal plan +* Stick to simple meal without carb + * Oat meal with raisin + * Chicken breast with 3 vegetables + * Carrot + * Broccoli + * Long green beans + * 3 quarter cup of Three Bean Salad + * Spinach with boiled egg + +# Break fast + * Five boiled egg white with oatmeal and apples + * Plain greek yogurt with Banana + * Lentil soup as snack (sundal) + * \ No newline at end of file diff --git a/src/main/md/Investing/MACD.md b/src/main/md/Investing/MACD.md new file mode 100644 index 00000000..4b15a598 --- /dev/null +++ b/src/main/md/Investing/MACD.md @@ -0,0 +1,62 @@ +# Moving convergence and divergence indicator + +* 12, 26, 9 - Days as base + * Fast and slow moving average (26 and 12) +* Difference between 12 and 26 with base as 9 +* 9 - Signale line +* Centered Oscilator +* Centered oscilators + * Are good to find Strength or Weakness + * To find direction of momentum +* MACD centerline = macd zero line +* When MACD line is going below zero, 26 day moving average is going below 12 day moving average - Bearish +* When histogram is +ve, MACD is above 9 days moving average +* When histogram is -ve, MACD is below 9 days moving average + +# 3 signals from MACD + +* MACD/Signal cross over - lagging +* MACD centerline cross over - reliable for long term (12 EMA cross 26) +* Divergence with price - Most powerful and leading indicator + * Occurs least often + +# general signals from MACD + +* MACD crossing zero-line (from below to above zero) is considered bullish +* Crossing center-line is lagging, but very reliable + +# MACD Histogram + +* Histogram is indicator for MACD indicator, very powerful +* Histogram represents difference between MACD and signl line +* Movements in the histogram is independent of MACD line itself +* Changes in the MACD direction often preceded by 
divergence in the histogram + * If histogram successive lows should indicate the MACD line like a leading indicator + + +# Powerful convergence and divergence + +* Divergence - diverging with trend of the underlying price (acts as leading indicator) + * Price hits higher and higher, but MACD hits lower highs.. (or vice versa) +* MACD line diverges with the actual price, and act as leading indicator + * When MACD's successive high are falling whereas actual price is going high, change is immenient, it would crash + * Higher highs in price, lower lows in MACD, bearish + * When MACD's successive lows are increases whereas actual price is going low, change is immenient, it would be bullish + * Lower lows in price, higher high in MACD, bullish + +# Oscilators + +* When oscilators are below zero line - bearish mode +* When oscilators are above zero line - bearish mode +* Moving averages are lagging indicators +* Exponential moving averages are faster than simple moving averages + + +# For effective MACD usage + +* Use this in combination with other banded indicator such as RSI/CCI/Stochaistic +* That would help to find overbought or oversold indication + + +## Reference +* [Mark Ledermann - Technical Analysis Indicator MACD part three](https://www.youtube.com/watch?v=4ybk72R9_90) \ No newline at end of file diff --git a/src/main/md/JPM_Jobs.md b/src/main/md/JPM_Jobs.md new file mode 100644 index 00000000..e0f1988c --- /dev/null +++ b/src/main/md/JPM_Jobs.md @@ -0,0 +1,11 @@ + + +## JPM Jobs Reference +[JPM Jobs](https://jobs.jpmorganchase.com/) +[Technology Jobs] (https://jobs.jpmorganchase.com/ListJobs/All/Search/JPMC-Job-Category/Technology/Country/SG/) +[Java jobs] (https://jobs.jpmorganchase.com/ListJobs/All/Search/country/sg/jobtitle/java/country/sg/jpmc-job-category/technology/) + +## JobId URL +* https://jobs.jpmorganchase.com/ShowJob/Id/199167 +* https://jobs.jpmorganchase.com/ShowJob/Id/199167/Java-Software-Engineer,-Wealth-Management,-Associate/ +* https://jobs.jpmorganchase.com/ShowJob/Id/199167/testOne122 \ No newline at end of file diff --git a/src/main/md/Java/Test.class b/src/main/md/Java/Test.class new file mode 100644 index 00000000..021d3a6d Binary files /dev/null and b/src/main/md/Java/Test.class differ diff --git a/src/main/md/Java/Test.java b/src/main/md/Java/Test.java new file mode 100644 index 00000000..9046141e --- /dev/null +++ b/src/main/md/Java/Test.java @@ -0,0 +1,9 @@ +import java.util.Arrays; +import java.util.stream.*; + +class Test { + public static void main(String[] args) { + var colors = Arrays.asList("red", "green", "blue", "yellow"); + java.util.Collections.reverse(colors).forEach(System.out::println); + } +} diff --git a/src/main/md/Java/beautiful_idiomatic_java.md b/src/main/md/Java/beautiful_idiomatic_java.md new file mode 100644 index 00000000..41e11bf5 --- /dev/null +++ b/src/main/md/Java/beautiful_idiomatic_java.md @@ -0,0 +1,686 @@ +# Transforming Code into Beautiful, Idiomatic ~~Python~~ Java + +Notes from Mohan Narayanaswamy's future talk at that "Java is not that verbose". + +The code examples and direct quotes are all from Raymond's talk. Jeff Paine created notes for python, I am cloning it for java. 
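The Java snippets in the sections below use `Arrays`, `IntStream`, and friends without showing imports. As an assumed preamble (my addition, not part of the original talk notes), the following two imports make them paste-able into `jshell` or a scratch class:

```java
// Assumed imports for the Java one-liners below (not shown in the original notes)
import java.util.*;          // Arrays, List, Map, ...
import java.util.stream.*;   // IntStream, Collectors, ...
```
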
+ +## Looping over a range of numbers + +```python +for i in range(6): + print i**2 +``` + + +```java + IntStream.range(1, 6).map(i -> i*i).forEach(System.out::println) +``` + +## Looping over a collection + +```python +colors = ['red', 'green', 'blue', 'yellow'] + +for color in colors: + print color +``` + +```java + var colors = Arrays.asList("red", "green", "blue", "yellow"); + colors.forEach(System.out::println); +``` + +## Looping backwards + +```python +colors = ['red', 'green', 'blue', 'yellow'] + +for i in range(len(colors)-1, -1, -1): + print colors[i] +``` + +### Better + +```python +for color in reversed(colors): + print color +``` + +## Looping over a collection and indices + +```python +colors = ['red', 'green', 'blue', 'yellow'] + +for i in range(len(colors)): + print i, '--->', colors[i] +``` + +### Better + +```python +for i, color in enumerate(colors): + print i, '--->', color +``` +> It's fast and beautiful and saves you from tracking the individual indices and incrementing them. + +> Whenever you find yourself manipulating indices [in a collection], you're probably doing it wrong. + +## Looping over two collections + +```python +names = ['raymond', 'rachel', 'matthew'] +colors = ['red', 'green', 'blue', 'yellow'] + +n = min(len(names), len(colors)) +for i in range(n): + print names[i], '--->', colors[i] + +for name, color in zip(names, colors): + print name, '--->', color +``` + +### Better + +```python +for name, color in izip(names, colors): + print name, '--->', color +``` + +`zip` creates a new list in memory and takes more memory. `izip` is more efficient than `zip`. +Note: in python 3 `izip` was renamed to `zip` and promoted to a builtin replacing the old `zip`. + +## Looping in sorted order + +```python +colors = ['red', 'green', 'blue', 'yellow'] + +# Forward sorted order +for color in sorted(colors): + print colors + +# Backwards sorted order +for color in sorted(colors, reverse=True): + print colors +``` + +## Custom Sort Order + +```python +colors = ['red', 'green', 'blue', 'yellow'] + +def compare_length(c1, c2): + if len(c1) < len(c2): return -1 + if len(c1) > len(c2): return 1 + return 0 + +print sorted(colors, cmp=compare_length) +``` + +### Better + +```python +print sorted(colors, key=len) +``` + +The original is slow and unpleasant to write. Also, comparison functions are no longer available in python 3. + +## Call a function until a sentinel value + +```python +blocks = [] +while True: + block = f.read(32) + if block == '': + break + blocks.append(block) +``` + +### Better + +```python +blocks = [] +for block in iter(partial(f.read, 32), ''): + blocks.append(block) +``` + +`iter` takes two arguments. The first you call over and over again and the second is a sentinel value. + +## Distinguishing multiple exit points in loops + +```python +def find(seq, target): + found = False + for i, value in enumerate(seq): + if value == target: + found = True + break + if not found: + return -1 + return i +``` + +### Better + +```python +def find(seq, target): + for i, value in enumerate(seq): + if value == target: + break + else: + return -1 + return i +``` + +Inside of every `for` loop is an `else`. + +## Looping over dictionary keys + +```python +d = {'matthew': 'blue', 'rachel': 'green', 'raymond': 'red'} + +for k in d: + print k + +for k in d.keys(): + if k.startswith('r'): + del d[k] +``` + +When should you use the second and not the first? When you're mutating the dictionary. 
+ +> If you mutate something while you're iterating over it, you're living in a state of sin and deserve what ever happens to you. + +`d.keys()` makes a copy of all the keys and stores them in a list. Then you can modify the dictionary. +Note: in python 3 to iterate through a dictionary you have to explicidly write: `list(d.keys())` because `d.keys()` returns a "dictionary view" (an iterable that provide a dynamic view on the dictionary’s keys). See [documentation](https://docs.python.org/3/library/stdtypes.html#dict-views). + +## Looping over dictionary keys and values + +```python +# Not very fast, has to re-hash every key and do a lookup +for k in d: + print k, '--->', d[k] + +# Makes a big huge list +for k, v in d.items(): + print k, '--->', v +``` + +### Better + +```python +for k, v in d.iteritems(): + print k, '--->', v +``` + +`iteritems()` is better as it returns an iterator. +Note: in python 3 there is no `iteritems()` and `items()` behaviour is close to what `iteritems()` had. See [documentation](https://docs.python.org/3/library/stdtypes.html#dict-views). + +## Construct a dictionary from pairs + +```python +names = ['raymond', 'rachel', 'matthew'] +colors = ['red', 'green', 'blue'] + +d = dict(izip(names, colors)) +# {'matthew': 'blue', 'rachel': 'green', 'raymond': 'red'} +``` +For python 3: `d = dict(zip(names, colors))` + +## Counting with dictionaries + +```python +colors = ['red', 'green', 'red', 'blue', 'green', 'red'] + +# Simple, basic way to count. A good start for beginners. +d = {} +for color in colors: + if color not in d: + d[color] = 0 + d[color] += 1 + +# {'blue': 1, 'green': 2, 'red': 3} +``` + +### Better + +```python +d = {} +for color in colors: + d[color] = d.get(color, 0) + 1 + +# Slightly more modern but has several caveats, better for advanced users +# who understand the intricacies +d = defaultdict(int) +for color in colors: +d[color] += 1 +``` + +## Grouping with dictionaries -- Part I and II + +```python +names = ['raymond', 'rachel', 'matthew', 'roger', + 'betty', 'melissa', 'judith', 'charlie'] + +# In this example, we're grouping by name length +d = {} +for name in names: + key = len(name) + if key not in d: + d[key] = [] + d[key].append(name) + +# {5: ['roger', 'betty'], 6: ['rachel', 'judith'], 7: ['raymond', 'matthew', 'melissa', 'charlie']} + +d = {} +for name in names: + key = len(name) + d.setdefault(key, []).append(name) +``` + +### Better + +```python +d = defaultdict(list) +for name in names: + key = len(name) + d[key].append(name) +``` + +## Is a dictionary popitem() atomic? + +```python +d = {'matthew': 'blue', 'rachel': 'green', 'raymond': 'red'} + +while d: + key, value = d.popitem() + print key, '-->', value +``` + +`popitem` is atomic so you don't have to put locks around it to use it in threads. + +## Linking dictionaries + +```python +defaults = {'color': 'red', 'user': 'guest'} +parser = argparse.ArgumentParser() +parser.add_argument('-u', '--user') +parser.add_argument('-c', '--color') +namespace = parser.parse_args([]) +command_line_args = {k:v for k, v in vars(namespace).items() if v} + +# The common approach below allows you to use defaults at first, then override them +# with environment variables and then finally override them with command line arguments. +# It copies data like crazy, unfortunately. +d = defaults.copy() +d.update(os.environ) +d.update(command_line_args) +``` + +### Better + +```python +d = ChainMap(command_line_args, os.environ, defaults) +``` + +`ChainMap` has been introduced into python 3. 
Fast and beautiful. + +## Improving Clarity + * Positional arguments and indicies are nice + * Keywords and names are better + * The first way is convenient for the computer + * The second corresponds to how human’s think + +## Clarify function calls with keyword arguments + +```python +twitter_search('@obama', False, 20, True) +``` + +### Better + +```python +twitter_search('@obama', retweets=False, numtweets=20, popular=True) +``` + +Is slightly (microseconds) slower but is worth it for the code clarity and developer time savings. + +## Clarify multiple return values with named tuples + +```python +# Old testmod return value +doctest.testmod() +# (0, 4) +# Is this good or bad? You don't know because it's not clear. +``` + +### Better + +```python +# New testmod return value, a namedTuple +doctest.testmod() +# TestResults(failed=0, attempted=4) +``` + +A namedTuple is a subclass of tuple so they still work like a regular tuple, but are more friendly. + +To make a namedTuple: + +```python +TestResults = namedTuple('TestResults', ['failed', 'attempted']) +``` + +## Unpacking sequences + +```python +p = 'Raymond', 'Hettinger', 0x30, 'python@example.com' + +# A common approach / habit from other languages +fname = p[0] +lname = p[1] +age = p[2] +email = p[3] +``` + +### Better + +```python +fname, lname, age, email = p +``` + +The second approach uses tuple unpacking and is faster and more readable. + +## Updating multiple state variables + +```python +def fibonacci(n): + x = 0 + y = 1 + for i in range(n): + print x + t = y + y = x + y + x = t +``` + +### Better + +```python +def fibonacci(n): + x, y = 0, 1 + for i in range(n): + print x + x, y = y, x + y +``` + +Problems with first approach + + * x and y are state, and state should be updated all at once or in between lines that state is mis-matched and a common source of issues + * ordering matters + * it's too low level + + +The second approach is more high-level, doesn't risk getting the order wrong and is fast. + +## Simultaneous state updates + +```python +tmp_x = x + dx * t +tmp_y = y + dy * t +tmp_dx = influence(m, x, y, dx, dy, partial='x') +tmp_dy = influence(m, x, y, dx, dy, partial='y') +x = tmp_x +y = tmp_y +dx = tmp_dx +dy = tmp_dy +``` + +### Better + +```python +x, y, dx, dy = (x + dx * t, + y + dy * t, + influence(m, x, y, dx, dy, partial='x'), + influence(m, x, y, dx, dy, partial='y')) +``` + +## Efficiency + * An optimization fundamental rule + * Don’t cause data to move around unnecessarily + * It takes only a little care to avoid O(n**2) behavior instead of linear behavior + +> Basically, just don't move data around unecessarily. 
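A Java counterpart to this rule (a sketch of my own, not from Raymond's talk): building a `String` with `+=` in a loop copies the accumulated text on every iteration, which is exactly the O(n**2) behavior warned about above, while `String.join` (or a `StringBuilder`) does a single linear pass. The next section makes the same point in Python.

```java
import java.util.List;

class JoinNames {
    public static void main(String[] args) {
        List<String> names = List.of("raymond", "rachel", "matthew", "roger");

        // Quadratic: every += re-copies everything accumulated so far
        String slow = names.get(0);
        for (String name : names.subList(1, names.size())) {
            slow += ", " + name;
        }

        // Linear: a single pass over the list
        String fast = String.join(", ", names);

        System.out.println(slow);
        System.out.println(fast);
    }
}
```
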
+ +## Concatenating strings + +```python +names = ['raymond', 'rachel', 'matthew', 'roger', + 'betty', 'melissa', 'judith', 'charlie'] + +s = names[0] +for name in names[1:]: + s += ', ' + name +print s +``` + +### Better + +```python +print ', '.join(names) +``` + +## Updating sequences + +```python +names = ['raymond', 'rachel', 'matthew', 'roger', + 'betty', 'melissa', 'judith', 'charlie'] + +del names[0] +# The below are signs you're using the wrong data structure +names.pop(0) +names.insert(0, 'mark') +``` + +### Better + +```python +names = deque(['raymond', 'rachel', 'matthew', 'roger', + 'betty', 'melissa', 'judith', 'charlie']) + +# More efficient with deque +del names[0] +names.popleft() +names.appendleft('mark') +``` +## Decorators and Context Managers + * Helps separate business logic from administrative logic + * Clean, beautiful tools for factoring code and improving code reuse + * Good naming is essential. + * Remember the Spiderman rule: With great power, comes great responsibility! + +## Using decorators to factor-out administrative logic + +```python +# Mixes business / administrative logic and is not reusable +def web_lookup(url, saved={}): + if url in saved: + return saved[url] + page = urllib.urlopen(url).read() + saved[url] = page + return page +``` + +### Better + +```python +@cache +def web_lookup(url): + return urllib.urlopen(url).read() +``` + +Note: since python 3.2 there is a decorator for this in the standard library: `functools.lru_cache`. + +## Factor-out temporary contexts + +```python +# Saving the old, restoring the new +old_context = getcontext().copy() +getcontext().prec = 50 +print Decimal(355) / Decimal(113) +setcontext(old_context) +``` + +### Better + +```python +with localcontext(Context(prec=50)): + print Decimal(355) / Decimal(113) +``` + +## How to open and close files + +```python +f = open('data.txt') +try: + data = f.read() +finally: + f.close() +``` + +### Better + +```python +with open('data.txt') as f: + data = f.read() +``` + +## How to use locks + +```python +# Make a lock +lock = threading.Lock() + +# Old-way to use a lock +lock.acquire() +try: + print 'Critical section 1' + print 'Critical section 2' +finally: + lock.release() +``` + +### Better + +```python +# New-way to use a lock +with lock: + print 'Critical section 1' + print 'Critical section 2' +``` + +## Factor-out temporary contexts + +```python +try: + os.remove('somefile.tmp') +except OSError: + pass +``` + +### Better + +```python +with ignored(OSError): + os.remove('somefile.tmp') +``` + +`ignored` is is new in python 3.4, [documentation](http://docs.python.org/dev/library/contextlib.html#contextlib.ignored). +Note: `ignored` is actually called `suppress` in the standard library. + +To make your own `ignored` context manager in the meantime: + +```python +@contextmanager +def ignored(*exceptions): + try: + yield + except exceptions: + pass +``` + +> Stick that in your utils directory and you too can ignore exceptions + +## Factor-out temporary contexts + +```python +# Temporarily redirect standard out to a file and then return it to normal +with open('help.txt', 'w') as f: + oldstdout = sys.stdout + sys.stdout = f + try: + help(pow) + finally: + sys.stdout = oldstdout +``` + +### Better + +```python +with open('help.txt', 'w') as f: + with redirect_stdout(f): + help(pow) +``` + +`redirect_stdout` is proposed for python 3.4, [bug report](http://bugs.python.org/issue15805). 
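+
+Note: `redirect_stdout` did land in Python 3.4, as `contextlib.redirect_stdout`. A minimal sketch of the standard-library version (same example as above, with the import spelled out):
+
+```python
+from contextlib import redirect_stdout
+
+with open('help.txt', 'w') as f:
+    with redirect_stdout(f):
+        help(pow)
+```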
+
+To roll your own `redirect_stdout` context manager on older versions:
+
+```python
+@contextmanager
+def redirect_stdout(fileobj):
+    oldstdout = sys.stdout
+    sys.stdout = fileobj
+    try:
+        yield fileobj
+    finally:
+        sys.stdout = oldstdout
+```
+
+## Concise Expressive One-Liners
+Two conflicting rules:
+
+ * Don’t put too much on one line
+ * Don’t break atoms of thought into subatomic particles
+
+Raymond’s rule:
+
+ * One logical line of code equals one sentence in English
+
+## List Comprehensions and Generator Expressions
+
+```python
+result = []
+for i in range(10):
+    s = i ** 2
+    result.append(s)
+print sum(result)
+```
+
+### Better
+
+```python
+print sum(i**2 for i in xrange(10))
+```
+
+The first way tells you what to do; the second way tells you what you want.
+
+
+## Appendix
+```java
+import java.util.stream.*;
+
+class Test {
+    public static void main(String[] args) {
+        IntStream.range(1, 6).map(i -> i*i).forEach(System.out::println);
+    }
+}
+```
\ No newline at end of file
diff --git a/src/main/md/Java/effective_java.md b/src/main/md/Java/effective_java.md
new file mode 100644
index 00000000..63a6c9b6
--- /dev/null
+++ b/src/main/md/Java/effective_java.md
@@ -0,0 +1 @@
+# Java Tips and Tricks from open source code
diff --git a/src/main/md/Java/google_guice_code_read.md b/src/main/md/Java/google_guice_code_read.md
new file mode 100644
index 00000000..ada2a36c
--- /dev/null
+++ b/src/main/md/Java/google_guice_code_read.md
@@ -0,0 +1,12 @@
+# [Guice](https://github.com/google/guice)
+* Guice : Module => Injector
+* Injector::injectMembers = (Object instance) => void
+* Injector::getInstance:: = (Class type) => T
+* Guice can't inject non-static-inner class
+
+
+# Code read
+* com\google\inject\internal\util
+  * Classes - find if concrete, abstract, find member types
+  *
+
\ No newline at end of file
diff --git a/src/main/md/Java/http_client.jshell b/src/main/md/Java/http_client.jshell
new file mode 100644
index 00000000..8c250437
--- /dev/null
+++ b/src/main/md/Java/http_client.jshell
@@ -0,0 +1,12 @@
+import java.net.http.HttpClient.*
+import java.net.http.*;
+
+var client = HttpClient.newBuilder().version(Version.HTTP_2).followRedirects(Redirect.NORMAL).build()
+var request = HttpRequest.newBuilder(URI.create("https://developer.mozilla.org/bm/docs/Web/JavaScript/Reference/Functions/Arrow_functions")).build()
+var response = client.send(request,HttpResponse.BodyHandlers.ofString())
+var content = response.body().toString()
+System.out.println(content)
+response.statusCode()
+response.headers()
+var asyncResponse = client.sendAsync(request,HttpResponse.BodyHandlers.ofString())
+asyncResponse.thenApply(HttpResponse::body).get()
\ No newline at end of file
diff --git a/src/main/md/Java/java_oneliner.md b/src/main/md/Java/java_oneliner.md
new file mode 100644
index 00000000..e350dde6
--- /dev/null
+++ b/src/main/md/Java/java_oneliner.md
@@ -0,0 +1,8 @@
+```find the jar of the className
+java -verbose:class -classpath $(echo *.jar | sed 's/ /:/g') com.anything.yourclass | grep "yourclass"
+```
+
+```Quick analysis for exception handling in java code
+find . -name \*java | grep -v "test.*est" | xargs grep -A 4 "catch.*xception" > exceptionHandling.txt
+grep -A 4 catch.*xception `find . 
-type f -name \*java | grep -v test` > xception.log +``` \ No newline at end of file diff --git a/src/main/md/Java/java_snippets.md b/src/main/md/Java/java_snippets.md new file mode 100644 index 00000000..d9ffe1bd --- /dev/null +++ b/src/main/md/Java/java_snippets.md @@ -0,0 +1,92 @@ +## JDK-9 Reactive Streams - Publish subscribe framework - java.util.concurrent.Flow +* + + + + ```java + var fruits = List.of("apple", "orange", "banana"); + var anotherSetOfFruits = Set.of("apple", "orange", "banana"); + var map = Map.of("a","b","c","d"); + // Use diamond operator, don't specify type while creating, compiler could infer + List list2 = new ArrayList<>(); + BiFunction biss = (i, _) -> String.valueOf(i); + var p = new ProcessBuilder("notepad.exe").start(); + System.out.println(p.pid()) + System.out.println(ProcessHandle.current().info().toString()); + ProcessHandle.allProcesses().filter(ph -> ph.info().command().isPresent()).limit(4).forEach(p -> System.out.printf("Process id: %s%n", p.info().toString())); + + //-- + StackWalker.getInstance(java.lang.StackWalker.Option.RETAIN_CLASS_REFERENCE).getCallerClass() + StackWalker.getInstance(java.lang.StackWalker.Option.RETAIN_CLASS_REFERENCE).forEach(System.out::println) + ``` +## You can have private static method as helper method inside interface +## Multi-release JAR files +``` +final FileInputStream fis = new FileInputStream("movie.mp4"); + +try (fis) +{ +//dodo +} +catch (IOException e) +{ + // ... +} +``` + +```jdk11 + + org.glassfish.jaxb + jaxb-runtime + 2.4.0-b180608.0325 + +``` + +## Modern Java Issues +### [ERROR] Failed to execute goal org.apache.maven.plugins:maven-compiler-plugin:3.7.0:compile (default-cli) on project auth-web-basic: Fatal error compiling: java.lang.ExceptionInInitializerError: com.sun.tools.javac.code.TypeTags -> [Help 1] +```xml + + org.projectlombok + lombok + 1.18.4 + +``` +### [Error] XML parser not in JDK + +```xml + + javax.xml.bind + jaxb-api + 2.2.11 + + + com.sun.xml.bind + jaxb-core + 2.2.11 + + + com.sun.xml.bind + jaxb-impl + 2.2.11 + + + javax.activation + activation + 1.1.1 + +``` + +## Flow References +* [Reactive Streams example - Java 9](https://aboullaite.me/reactive-streams-example-java-9/) +* [Reactive Streams In Java 9](https://www.javagists.com/reactive-streams-java-9) +* [Publish-Subscribe mit der Flow-API in Java 9](https://blog.oio.de/2018/05/04/publish-subscribe-mit-der-flow-api-in-java-9/) +* [Reactive Programming with JDK 9 Flow API](https://community.oracle.com/docs/DOC-1006738) +* [The Essential Java 9 Feature You Probably Never Heard Of](https://blog.takipi.com/the-essential-java-9-feature-you-probably-never-heard-of/) +* [Reactive Streams in Java 9](https://dzone.com/articles/reactive-streams-in-java-9) +* [Java 9 Flow API – Reactive Streams](https://grokonez.com/java/java-9/java-9-flow-api-reactive-streams) +* [Flow API](https://docs.oracle.com/javase/9/docs/api/java/util/concurrent/Flow.html) +## Java 9 References +* [Java 9 Features with Examples](https://www.journaldev.com/13121/java-9-features-with-examples) +* [Java 9 series: Concurrency Updates](https://www.voxxed.com/2016/10/java-9-series-concurrency-updates/) +* [JEP 266: More Concurrency Updates](http://openjdk.java.net/jeps/266) +* [Java Platform, Standard Edition What’s New in Oracle JDK 9](https://docs.oracle.com/javase/9/whatsnew/toc.htm) diff --git a/src/main/md/Java/jshell_jdk_11.md b/src/main/md/Java/jshell_jdk_11.md new file mode 100644 index 00000000..86fbf65e --- /dev/null +++ b/src/main/md/Java/jshell_jdk_11.md @@ 
-0,0 +1,36 @@ +# Jshell + +```jshell +/help /vars +/vars +import java.util. +import java.util.regex +/i +``` + + +```jshell +/help /exit +/help /vars +/vars +//you can also type import java.util. +import java.util.regex +//show imports +/i +var a= 30 +var b= 35 +b*a +/vars +/vars $4 +$4 +"foo".repeat(3) +"foo".isEmpty() +"foo".isBlank() +``` + +```bat +jshell https://kishida.github.io/misc/jframe.jshell +jshell https://gist.githubusercontent.com/mohanmca/88de9d6115587f9b8c6e8ac73b80f46e/raw/a6f272479026f8bb5d79f01f9cbab631e04cb78c/jshell.jshell +``` + +# Reference \ No newline at end of file diff --git a/src/main/md/Java/upto_date_java.md b/src/main/md/Java/upto_date_java.md new file mode 100644 index 00000000..baf0d89b --- /dev/null +++ b/src/main/md/Java/upto_date_java.md @@ -0,0 +1,10 @@ +## Keeping up to date with Java + +* https://github.com/trending/java +* https://www.javaworld.com/article/3164262/java-language/java-9s-other-new-enhancements-part-1.html to part-6 +* Validation engine - https://github.com/lesfurets/dOOv/wiki/Getting-Started +* Fluent API, Type safe +* Failure validation is human readable +* https://github.com/lesfurets/dOOv +* https://static.rainfocus.com/oracle/oraclecode18/sess/1525874149985001Q3XY/PF/DSL.using%28java%29.toGoBeyond%28BeanValidation%29.at%28OracleCode%29%3B_1530359519477001SbsB.pdf +* \ No newline at end of file diff --git a/src/main/md/Javascript/.npmrc b/src/main/md/Javascript/.npmrc new file mode 100644 index 00000000..9fde57a8 --- /dev/null +++ b/src/main/md/Javascript/.npmrc @@ -0,0 +1,3 @@ +progress=false +save=true +save-exact=true \ No newline at end of file diff --git a/src/main/md/Javascript/D3.JS.md b/src/main/md/Javascript/D3.JS.md new file mode 100644 index 00000000..711e7da4 --- /dev/null +++ b/src/main/md/Javascript/D3.JS.md @@ -0,0 +1,37 @@ +* D3.js uses SVG internally, knowledge of SVG is quite useful +* D3.js uses css3 selector to select element +* Slected element can get or set thing like + * attributes + * properties + * styles + * text-content + * HTML +* Select and apply operators + + +```html + + + +``` +```js +d3 + .select("body") + .append("svg") + .attr("width", 50).attr("height", 50) + .append("circle") + .attr("cx", 25).attr("cy", 25).attr("r", 25) + .style("fill", "purple"); +``` + + +## Reference +* [Building blocks editor](https://blockbuilder.org/) +* [Popular Blocks](https://bl.ocks.org/) +* [a fun, difficult introduction to d3](https://tmcw.github.io/presentations/dcjq/) +* [Binding data](https://alignedleft.com/tutorials/d3/binding-data/) +* [How Selections Work](https://bost.ocks.org/mike/selection/) +* [A linguistic introduction to d3.js](https://medium.freecodecamp.org/a-linguistic-introduction-to-d3-js-7a40a980bf97) +* [D3 Tutorial Table of Contents](https://www.dashingd3js.com/adding-a-dom-element) +* [Thinking with Joins](https://bost.ocks.org/mike/join/) +* [Path data generation helper for SVG elements](https://gist.github.com/potch/4214346) \ No newline at end of file diff --git a/src/main/md/Javascript/ES2018.md b/src/main/md/Javascript/ES2018.md new file mode 100644 index 00000000..4945ac71 --- /dev/null +++ b/src/main/md/Javascript/ES2018.md @@ -0,0 +1,5 @@ +* [Object rest and spread properties](https://developers.google.com/web/updates/2017/06/object-rest-spread) +* [Promise.prototype.finally](https://developers.google.com/web/updates/2017/10/promise-finally) +* [Async iterators and generators](https://jakearchibald.com/2017/async-iterators-and-generators/) +* [Here are examples of everything new in 
ECMAScript 2016, 2017, and 2018](https://medium.freecodecamp.org/here-are-examples-of-everything-new-in-ecmascript-2016-2017-and-2018-d52fa3b5a70e) + * [Hacker news comments](https://news.ycombinator.com/item?id=16743765) \ No newline at end of file diff --git a/src/main/md/Javascript/Modern_JS.md b/src/main/md/Javascript/Modern_JS.md new file mode 100644 index 00000000..faff3bf1 --- /dev/null +++ b/src/main/md/Javascript/Modern_JS.md @@ -0,0 +1,2 @@ +* [Cross Stitching: Elegant Concurrency Patterns for JavaScript](https://www.youtube.com/watch?v=726eZyVtC0Y) +* [IIAFEs - ES8 Immediately invoked async function expression](https://stackoverflow.com/questions/40745153/es8-immediately-invoked-async-function-expression) \ No newline at end of file diff --git a/src/main/md/Javascript/common_js_amd.md b/src/main/md/Javascript/common_js_amd.md new file mode 100644 index 00000000..4c89cf19 --- /dev/null +++ b/src/main/md/Javascript/common_js_amd.md @@ -0,0 +1,13 @@ +# CommonJS +* Initial goal was to create standard server side library for Javascript +* If standard library is required, there should be modular way to package and load them, so CommonJS also covers module portion of it +* It also specified list of libraries (packages) like binary, encodings, io, fs, sockets, event-queue, worker and console along with modules + + +# RequireJS +* Despite CommonJs solved modular issue, it assumed everything is on server, synchronous loading, hence didn't work for browser +* AMD specification tried to solve it, and requirejs is common implementation being used + + +# http://wiki.commonjs.org/wiki/CommonJS +# https://stackoverflow.com/questions/16521471/relation-between-commonjs-amd-and-requirejs \ No newline at end of file diff --git a/src/main/md/dailyJavascript.md b/src/main/md/Javascript/dailyJavascript.md similarity index 70% rename from src/main/md/dailyJavascript.md rename to src/main/md/Javascript/dailyJavascript.md index 62512e3d..24d44b87 100644 --- a/src/main/md/dailyJavascript.md +++ b/src/main/md/Javascript/dailyJavascript.md @@ -1,208 +1,239 @@ -* Array with 10 numbers - Array.from(Array(10).keys()) -* Goto https://www.omgeo.com/documentation/?p=alert And run below script to download documents.. 
-$('a[target="_blank"]').filter((index,tag) => tag.href.contains("documentation/D")).map((index, tag) => console.log(tag.href)) - -* [Download details from google result](https://github.com/mohanmca/MohanLearningGround/blob/master/src/main/js/google/tools/rip_google_results.js) - -* https://gist.github.com/TomyJaya/6468ce42e43b8e0dad20 - - -* Download an URL to a file using Javascript code -``` -function downloadURI(uri, name) { - var link = document.createElement("a"); - link.download = name; - link.href = uri; - document.body.appendChild(link); - link.click(); - document.body.removeChild(link); - delete link; -} -``` -* Download an URL to a variable -```Javascript -/* Download to a variable */ -function download(url) { - xmlhttp=new XMLHttpRequest() - xmlhttp.open("GET", url, false) - xmlhttp.send() - return xmlhttp.responseText -} -let content = download("https://gist.github.com/mohanmca/5816e30d911620c983302b13d7a187b4") -``` - -* Parse html text content into dom -```Javascript -parser = new DOMParser() -let contentDom = parser.parseFromString(content, "text/xml") -``` -* Add script to download data -```Javascript -var result = $.get("http://localhost:9000/json/search/searchText", {}).done( - function(response) { - return response.data - }).done(function(result) { - console.log(result.results) - return result.results; -}); - -```Javascript - -* Add script to chrome console -```Javascript -function addScript(scriptUrl) { - var script = document.createElement('script'); - script.type = 'text/javascript'; - script.src = scriptUrl; - document.head.appendChild(script); -} -``` - -```Javascript -addScript('https://cdnjs.cloudflare.com/ajax/libs/lodash.js/4.5.1/lodash.js') -_.filter([1,2,3], x => x%2 == 0) -addScript('https://cdnjs.cloudflare.com/ajax/libs/rxjs/5.4.2/Rx.js') -Rx.Observable.range(1,3) -``` - -```Javascript -var jsdom = require('jsdom') -jsdom.env({ - url: "http://news.ycombinator.com/", - scripts: ["http://code.jquery.com/jquery.js"], - done: function (err, window) { - var $ = window.$; - console.log("HN Links"); - $("td.title:not(:last) a").each(function() { - console.log(" -", $(this).text()); - }); - } -}); -``` - -Steps to create ES2016 project on node - -* Ensure following two files are there in root -* webpack.config.js -* .bablerc - -//Update node -npm update -g -npm cache clean -npm install --save-dev babel-core babel-loader babel-preset-react -npm install -g webpack -webpack -w //watch file and transform -webpack -p //ship to production - -//Simple bable rc -{ - "presets": [ - "react" - ] -} - -```Javascript -//webpack.config.js -// In webpack.config.js -var HtmlWebpackPlugin = require('html-webpack-plugin') -var HTMLWebpackPluginConfig = new HtmlWebpackPlugin({ - template: __dirname + '/app/index.html', - filename: 'index.html', - inject: 'body' -}); -module.exports = { - entry: [ - './app/index.js' - ], - module: { - loaders: [{ - test: /\.coffee$/, - exclude: /node_modules/, - loader: "coffee-loader" - }, { - test: /\.js$/, - exclude: /node_modules/, - loader: "babel-loader" - }] - }, - output: { - filename: "index_bundle.js", - path: __dirname + '/dist' - }, -} -``` - -```Javascript -var packages = [] -var nodes = document.getElementsByClassName("css-truncate-target") -for(var i=0; i< nodes.length;i++){ if(nodes[i].children[0]) packages.push(nodes[i].children[0].text) } -packages.map( _ => "apm install " + _).join("\n") -"apm install " + packages.join(", ") -``` - - -* Copy list of links from google chrome - -```Javascript -const onlyUnique = (value, index, self) 
=> self.indexOf(value) === index -var container = document.evaluate('//*[@id="rso"]/div/div', document, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null) -var results = Array.from(container.singleNodeValue.children) -var resultLinks = results.map(childNode => document.evaluate('div/div/h3/a', childNode, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null)) -resultLinks.map( link => link.singleNodeValue.href) - -var result = resultLinks.map(x => x.singleNodeValue.href).filter(onlyUnique).join("\r\n") -copy(result) -``` - - -* Copy list of links from ycombinator articles -```Javascript -// Navigate to - https://news.ycombinator.com/item?id=15154903 -var container = document.evaluate('//a', document, null, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE , null) -var items = Array.from(Array(container.snapshotLength).keys()) -var _links = items.map(i => container.snapshotItem(i).innerHTML).filter(text => text.indexOf("http")!=-1) -var links = _links.sort().filter((e,i,a) => a.indexOf(e)==i) -console.log(links.join("\n")) -``` - -* https://atom.io/packages/list?direction=desc&sort=stars -* https://atom.io/packages/list?direction=desc&sort=downloads - - -let links = Array.from(document.getElementsByTagName("a")).filter(link => link.href.endsWith("pdf")).map(link => link.href) - -function downloadURI(uri) { - var link = document.createElement("a"); - link.href = uri; - document.body.appendChild(link); - link.click(); - delete link; -} - - -### How to extract youtube motivational speech text - -* https://www.youtube.com/watch?v=I22Lf0xF0UE and skip advertisement -* find "...More" button, and click that button and select "Transcript" -* Select english as language -* press f12 > select console -* paste below lines of code, and get transcript. - - -```Javascript -{ - let transcriptLines = Array.from(document.getElementById("transcript-scrollbox").childNodes) - let text = transcriptLines.map(text => text.childNodes[1].innerHTML) - let result = text.filter(text => text.indexOf("MUSIC") == -1) - console.log(result) -} -``` - - -```Javascript -{ - let transcript = Array.from(document.getElementsByClassName("ytd-transcript-renderer")).filter(element => element.id == "body")[0].innerText - let result = transcript.split("\n").filter(text => text.indexOf(":") == -1).filter(text => text.toLocaleLowerCase().indexOf("music") == -1) - console.log(result.join("\n")) -} +* Array with 10 numbers - [...Array(10).keys()] +* Goto https://www.omgeo.com/documentation/?p=alert And run below script to download documents.. 
+$('a[target="_blank"]').filter((index,tag) => tag.href.contains("documentation/D")).map((index, tag) => console.log(tag.href)) + +* [Download details from google result](https://github.com/mohanmca/MohanLearningGround/blob/master/src/main/js/google/tools/rip_google_results.js) + +* https://gist.github.com/TomyJaya/6468ce42e43b8e0dad20 + + +* Convert HTML Table to JSON +``` +http://singapores100.com/Ranking.aspx +var table = document.getElementById("ranking") +console.log(hTableToJson(table)) + +function hTableToJson(table) { + var rows = [...table.rows] + var content = rows.map(row => Array.from(row.cells).map(cell => cell.innerText)) + var headers = content[0] + var result = content.map(record => { var r = {}; record.map( (v,i,a) => r[headers[i]] = v.replace(/^\s+|\s+$/g,'') ); delete r[""]; return r } ) + return JSON.stringify(result, null, 2); +} + +``` + +* Download an URL to a file using Javascript code +``` +function downloadURI(uri, name) { + var link = document.createElement("a"); + link.download = name; + link.href = uri; + document.body.appendChild(link); + link.click(); + document.body.removeChild(link); + delete link; +} +``` +* Download an URL to a variable +```Javascript +/* Download to a variable */ +function download(url) { + xmlhttp=new XMLHttpRequest() + xmlhttp.open("GET", url, false) + xmlhttp.send() + return xmlhttp.responseText +} +let content = download("https://gist.github.com/mohanmca/5816e30d911620c983302b13d7a187b4") +``` + +* Parse html text content into dom +```Javascript +parser = new DOMParser() +let contentDom = parser.parseFromString(content, "text/xml") +``` +* Add script to download data +```Javascript +var result = $.get("http://localhost:9000/json/search/searchText", {}).done( + function(response) { + return response.data + }).done(function(result) { + console.log(result.results) + return result.results; +}); + +```Javascript + +* Add script to chrome console +```Javascript +function addScript(scriptUrl) { + var script = document.createElement('script'); + script.type = 'text/javascript'; + script.src = scriptUrl; + document.head.appendChild(script); +} +``` + +```Javascript +addScript('https://cdnjs.cloudflare.com/ajax/libs/lodash.js/4.5.1/lodash.js') +_.filter([1,2,3], x => x%2 == 0) +addScript('https://cdnjs.cloudflare.com/ajax/libs/rxjs/5.4.2/Rx.js') +Rx.Observable.range(1,3) +Rx.Observable.range(0, 10).scan((x,y) => x+y).subscribe(x => console.log(x)) +``` + +```Javascript +var jsdom = require('jsdom') +jsdom.env({ + url: "http://news.ycombinator.com/", + scripts: ["http://code.jquery.com/jquery.js"], + done: function (err, window) { + var $ = window.$; + console.log("HN Links"); + $("td.title:not(:last) a").each(function() { + console.log(" -", $(this).text()); + }); + } +}); +``` + +Steps to create ES2016 project on node + +* Ensure following two files are there in root +* webpack.config.js +* .bablerc + +//Update node +npm update -g +npm cache clean +npm install --save-dev babel-core babel-loader babel-preset-react +npm install -g webpack +webpack -w //watch file and transform +webpack -p //ship to production + +//Simple bable rc +{ + "presets": [ + "react" + ] +} + +```Javascript +//webpack.config.js +// In webpack.config.js +var HtmlWebpackPlugin = require('html-webpack-plugin') +var HTMLWebpackPluginConfig = new HtmlWebpackPlugin({ + template: __dirname + '/app/index.html', + filename: 'index.html', + inject: 'body' +}); +module.exports = { + entry: [ + './app/index.js' + ], + module: { + loaders: [{ + test: /\.coffee$/, + exclude: 
/node_modules/, + loader: "coffee-loader" + }, { + test: /\.js$/, + exclude: /node_modules/, + loader: "babel-loader" + }] + }, + output: { + filename: "index_bundle.js", + path: __dirname + '/dist' + }, +} +``` + +```Javascript +var packages = [] +var nodes = document.getElementsByClassName("css-truncate-target") +for(var i=0; i< nodes.length;i++){ if(nodes[i].children[0]) packages.push(nodes[i].children[0].text) } +packages.map( _ => "apm install " + _).join("\n") +"apm install " + packages.join(", ") +``` + + +* Copy list of links from google chrome + +```Javascript +const onlyUnique = (value, index, self) => self.indexOf(value) === index +var container = document.evaluate('//*[@id="rso"]/div/div', document, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null) +var results = Array.from(container.singleNodeValue.children) +var resultLinks = results.map(childNode => document.evaluate('div/div/h3/a', childNode, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null)) +resultLinks.map( link => link.singleNodeValue.href) + +var result = resultLinks.map(x => x.singleNodeValue.href).filter(onlyUnique).join("\r\n") +copy(result) +``` + + +* Copy list of links from ycombinator articles - Hacker news +```Javascript +// Navigate to - https://news.ycombinator.com/item?id=15154903 or https://news.ycombinator.com/item?id=16745042 +const container = document.evaluate('//a', document, null, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE , null) +const items = Array.from(Array(container.snapshotLength).keys()) +const _links = items.map(i => container.snapshotItem(i).innerHTML).filter(text => text.indexOf("http")!=-1) +const links = _links.sort().filter((e,i,a) => a.indexOf(e)==i) +console.log("Unique links \n" + links.join("\n")) +let map = {} +_links.sort().map(link => link.substring(0,100)).forEach(link => map[link] = (map[link] || 0) + 1) +Object.entries(map).sort(kv => kv[1]).map(kv => kv[1].toString().padEnd(4) + kv[0]).join("\n") + + +console.log(links.join("\n")) +``` + +* https://atom.io/packages/list?direction=desc&sort=stars +* https://atom.io/packages/list?direction=desc&sort=downloads + +```Javascript +let links = [...document.getElementsByTagName("a")].filter(link => link.href.endsWith("pdf")).map(link => link.href) + +function downloadURI(uri) { + var link = document.createElement("a"); + link.href = uri; + document.body.appendChild(link); + link.click(); + delete link; +} +``` + +### How to extract youtube motivational speech text + +* https://www.youtube.com/watch?v=I22Lf0xF0UE and skip advertisement +* find "...More" button, and click that button and select "Transcript" +* Select english as language +* press f12 > select console +* paste below lines of code, and get transcript. 
+ + +```Javascript +{ + let transcriptLines = [...document.getElementById("transcript-scrollbox").childNodes] + let text = transcriptLines.map(text => text.childNodes[1].innerHTML) + let result = text.filter(text => text.indexOf("MUSIC") == -1) + console.log(result) +} +``` + + +```Javascript +{ + let transcript = [...document.getElementsByClassName("ytd-transcript-renderer")].filter(element => element.id == "body")[0].innerText + let result = transcript.split("\n").filter(text => text.indexOf(":") == -1).filter(text => text.toLocaleLowerCase().indexOf("music") == -1) + console.log(result.join("\n")) +} +``` + +## dowload MIT lecture notes +* navivate to https://ocw.mit.edu/courses/mathematics/18-s096-topics-in-mathematics-with-applications-in-finance-fall-2013/lecture-notes/ +* F12 for dev-console +``` +let links = Array.from(document.getElementsByTagName("a")) +let lectureNotes = links.filter(link => link.href.indexOf(".pdf")!=-1).map(link => link.href) ``` \ No newline at end of file diff --git a/src/main/md/Javascript/javascript_youtube.md b/src/main/md/Javascript/javascript_youtube.md new file mode 100644 index 00000000..a517d9c2 --- /dev/null +++ b/src/main/md/Javascript/javascript_youtube.md @@ -0,0 +1,2 @@ +* Barnacules Nerdgasm - https://www.youtube.com/channel/UC29ju8bIPH5as8OGnQzwJyA - https://www.youtube.com/watch?v=JaMCxVWtW58 +* diff --git a/src/main/md/Javascript/node.md b/src/main/md/Javascript/node.md new file mode 100644 index 00000000..ddecc26b --- /dev/null +++ b/src/main/md/Javascript/node.md @@ -0,0 +1,96 @@ +* Every time once node is installed or upgraded + * npm install -g yarn babel-cli webpack webpack-cli nodemon + * npm install --save-dev babel-preset-env + * yarn add -D webpack-dev-server + +* Generate project with webpack and babel + * ```bash + npm install -g yo + npm install -g generator-simple-webpack + yo simple-webpack + ``` +* NPM version + * ^ - As long as major version is same, it would update latest version - Example: ^4.minor.patch (don't care minor and patch version) + * ~ - More restrictive - As long as major version is same, it would update latest version - Example: ~4.14.y (don't care patch version, but retains minor and major) + +* NPX is not available on windows when Node is installed using NVM + * npm i -g npx + +* window RefError when using webpack for node project + * target: "node" and remove other loaders related to html + +* Raw babel compiler + * npx babel src --out-dir dist + +* (node:5904) UnhandledPromiseRejectionWarning: Error: Evaluation failed: ReferenceError: _toConsumableArray is not defined + * var rows = [...table.rows] was changed to var rows = Array.from(table.rows) + +* TypeError: (intermediate value)(intermediate value)(...) is not a function + +* Never mix the logic of extracting data from dom-node and extracting dom-node + * Extract dom node + * Finally extract data from dom node + +* babel-node is buggy, always try to use node + +## NVM +```bash +nvm ls-remote +nvm list available +nvm install 10.15.0 +nvm use 10.15.0 +``` + +## node.js other tools +``` +npm instal -g chalk +npm instal -g morgan +npm instal -g nodemon +npm instal -g eslint && eslint --init +node_modules\.bin\eslint app.js --fix +``` + +## node debug module +```javascript +npm install debug +set DEBUG=* & node app.js +set DEBUG=app & node app.js +app.use(morgan('combined')); +app.use(morgan('tiny')); +``` + + +## express.js notes +* without path module, how to serve html file. 
File path is problematic +* Ensure following toolings are proper in express.js project + * nodemon, environment variable for port, npm start, ES6, ESLINT + * morgan is sample middleware + * Simplest middleware + ```js + app.use((req, res, next)={ + console.log("Middleware!"); + next(); + }) + ``` + +## webpack + * [Compiler plugin](https://github.com/webpack/docs/wiki/plugins) + * webpack --watch src\shorten.js dist\app.js + +## VSCODE +* how-to-i-hide-node_modules-files-from-the-sidebar-in-visual-studio-code + * File > Preferences > Settings > "Workspace Settings" +```json + "settings": { + "files.exclude": { + "node_modules/": true + } + } +``` + +## PUPPETEER +* await page.addScriptTag({ path: './node_modules/varname/build/varname.js' }); +* Try to avoid raising events, but manipulate dom if possible instead of events + +## Reference +* [Node Green](http://node.green) \ No newline at end of file diff --git a/src/main/md/Javascript/puppeteer.js b/src/main/md/Javascript/puppeteer.js new file mode 100644 index 00000000..e69de29b diff --git a/src/main/md/Javascript/react_js.md b/src/main/md/Javascript/react_js.md new file mode 100644 index 00000000..db4d7816 --- /dev/null +++ b/src/main/md/Javascript/react_js.md @@ -0,0 +1,16 @@ +# ReactJS + +```Javascript +npm install -g create-react-app +npx create-react-app my-app +cd my-app +npm install --save react-bootstrap-typeahead +npm install --save react-bootstrap +npm start +``` + +## Hooks +* [React as a UI Runtime](https://overreacted.io/react-as-a-ui-runtime/) +* [Making Sense of React Hooks](https://medium.com/@dan_abramov/making-sense-of-react-hooks-fdbde8803889) +* [Why Do React Hooks Rely on Call Order?](https://overreacted.io/why-do-hooks-rely-on-call-order/#flaw-7-cant-pass-values-between-hooks) +* [Use Hooks](https://usehooks.com/) \ No newline at end of file diff --git a/src/main/md/Javascript/restful_api.http.md b/src/main/md/Javascript/restful_api.http.md new file mode 100644 index 00000000..a7ccf4c9 --- /dev/null +++ b/src/main/md/Javascript/restful_api.http.md @@ -0,0 +1,7 @@ +## List of restful api's for development +* https://api.github.com/users +* https://gateway.reddit.com/desktopapi/v1/subreddits/javascript?sort=top&t=day +* https://en.wikipedia.org/api/rest_v1/page/summary/Indira +* https://en.wikipedia.org/w/api.php?action=opensearch&format=json&formatversion=2&search=java&namespace=0&limit=10&suggest=true +* https://restcountries.eu/rest/v2/all +* https://samples.openweathermap.org/data/2.5/weather?q=London,uk&appid=b6907d289e10d714a6e88b30761fae22 \ No newline at end of file diff --git a/src/main/md/Javascript/rxjs.md b/src/main/md/Javascript/rxjs.md new file mode 100644 index 00000000..c6fc5acf --- /dev/null +++ b/src/main/md/Javascript/rxjs.md @@ -0,0 +1,178 @@ +### 4 problems with callback +* Call back hell +* Underyling concurrency is complex +* callback called more than once +* changes the error sequence (not like try-catch) + +### Event Handler +* Events are not handled as first-class value in handler, as handlers ignores return value of handler +* Can't compose two event handlers + +*Promise doesn't support series of events, only handles one events (and result)* + +### Reactive Streaming +* We can think of our streaming sequence as an array in which elements are separated by time instead of by memory (or Comma). +* Dealing with sequences gives us enormous power; we can merge, transform, or pass around Observables easily. 
+* Turn events we can’t get a handle on into a tangible data structure (sequence of observable), that’s as easy to use as an array. + + +###RX +* Create, Transform, Compose and React to streams of data +* http://rxmarbles.com/#findIndex +* RxJS provides plenty of other operators (like create) that make it easy to create Observables for common sources. +* https://www.learnrxjs.io/operators/creation/create.html + +###Usage +npm install rx + +```Javascript +var Rx = require(​'rx'​); + +Rx.Observable.just(​'Hello World!'​).subscribe(​function​(value) { + console.log(value); +}); +``` + + + +```Javascript +var​ clicks = 0; +​ document.addEventListener(​'click'​, ​function​ registerClicks(e) { +​ ​if​ (clicks < 10) { +​ ​if​ (e.clientX > window.innerWidth / 2) { +​ console.log(e.clientX, e.clientY); +​ clicks += 1; +​ } +​ } ​else​ { +​ document.removeEventListener(​'click'​, registerClicks); +​ } +​ }); + +//vs + + Rx.Observable.fromEvent(document, 'click') + .filter(function(c) { return c.clientX > window.innerWidth / 2; }) + .take(10) + .subscribe(function(c) { console.log(c.clientX, c.clientY) }) + +``` + +### Equation between Observer, Iterator and Observable +Iterator pattern + +Iterator.hasNext() +Iterator.next() +https://media.pragprog.com/titles/smreactjs/code/ch1/iterator.js + + +Rx pattern = Observer pattern + Iterator pattern + +* The Observable sequence, or simply Observable is central to the Rx pattern. +* An Observable emits its values in order—like an iterator +* But instead of its consumers requesting the next value, the Observable “pushes” values to consumers as they become available. +* It has a similar role to the Producer’s in the Observer pattern: emitting values and pushing them to its listeners. + +*In Rx, Observable is one Data Type to Rule Them All (Example: Merge events with Array, convert Array into Obserable)* + + +### Difference with traditional Observer and Observable + +* An Observable doesn’t start streaming items until it has at least one Observer subscribed to it. +* Like iterators, an Observable can signal when the sequence is completed. + +**Observables don’t do anything until at least one Observer subscribes to them. ** + + +#### How to create Obserable +```Javascript +​ ​var​ observer = Rx.Observer.create( +​ ​function​ onNext(x) { console.log(​'Next: '​ + x); }, +​ ​function​ onError(err) { console.log(​'Error: '​ + err); }, +​ ​function​ onCompleted() { console.log(​'Completed'​); } +​ ); +``` + +```Javascript +​ ​function​ get(url) { +​ ​return​ Rx.Observable.create(​function​(observer) { +​ ​// Make a traditional Ajax request​ +​ ​var​ req = ​new​ XMLHttpRequest(); +​ req.open(​'GET'​, url); +​ +​ req.onload = ​function​() { +​ ​if​ (req.status == 200) { +​ ​// If the status is 200, meaning there have been no problems,​ +​ ​// Yield the result to listeners and complete the sequence​ +​ observer.onNext(req.response); +​ observer.onCompleted(); +​ } +​ ​else​ { +​ ​// Otherwise, signal to listeners that there has been an error​ +​ observer.onError(​new​ Error(req.statusText)); +​ } +​ }; +​ +​ req.onerror = ​function​() { +​ observer.onError(​new​ Error(​"Unknown Error"​)); +​ }; +​ +​ req.send(); +​ }); +​ } +​ +​ ​// Create an Ajax Observable​,Nothing happens without subscriber, No request sent to remote server +​ ​var​ test = get(​'/api/contents.json'​); + +​​ ​//With Subscriber, real usage.. 
+ Rx.DOM.get(​'/api/contents.json'​).subscribe( +​ ​function​ onNext(data) { console.log(data.response); }, +​ ​function​ onError(err) { console.error(err); } +​ ); +``` + +```Javascript +​ ​var​ Rx = require(​'rx'​); ​// Load RxJS​ +​ ​var​ fs = require(​'fs'​); ​// Load Node.js Filesystem module​ +​ +​ ​// Create an Observable from the readdir method​ +​ ​var​ readdir = Rx.Observable.fromNodeCallback(fs.readdir); +​ +​ ​// Send a delayed message​ +​ ​var​ source = readdir(​'/Users/sergi'​); +​ +​ ​var​ subscription = source.subscribe( +​ ​function​(res) { console.log(​'List of directories: '​ + res); }, +​ ​function​(err) { console.log(​'Error: '​ + err); }, +​ ​function​() { console.log(​'Done!'​); }); +``` + +```Javascript +var counter = Rx.Observable.interval(1000); +var disposable = counter.subscribe( (i) => console.log(i) ) +disposable.dispose() +```` + +### From Promise to RX +```Javascript +var p = Promise.resolve(3) +Rx.Observable.fromPromise(p).subscribe( i => console.log(i)) +``` + +## Three choices of error Handling +* onError, Survives only one error, onComplete will no be invoked +* catch operator, Multiple error can be handled, but error will be part of actual data (instead of onError) +* retry operator, N number of errors are tolerated, without input, it would become infinity + +```Javascript +//create promise that immediately rejects +const myBadPromise = () => new Promise((resolve, reject) => reject('Rejected!')); +//emit single value after 1 second +const source = Rx.Observable.timer(1000); +//catch rejected promise, returning observable containing error message +const example = source.flatMap(() => Rx.Observable + .fromPromise(myBadPromise()) + .catch(error => Rx.Observable.of(`Bad Promise: ${error}`)) + ); +//output: 'Bad Promise: Rejected' +const subscribe = example.subscribe(val => console.log(val)); +``` \ No newline at end of file diff --git a/src/main/md/LearningJournal/journal.md b/src/main/md/LearningJournal/journal.md new file mode 100644 index 00000000..1a9c7deb --- /dev/null +++ b/src/main/md/LearningJournal/journal.md @@ -0,0 +1,54 @@ +* "Date" - Source - "Course/page" - Started/InProgress/Completed/Todo +* "27-Jan-2019" - "Spring Security" - In Progress +* "24-Jan-2019" - "Developing Microservices and Mobile Apps with JHipster - Entity gneration, using Docker mongodb" - In Progress - "https://app.pluralsight.com/library/courses/play-by-play-developing-microservices-mobile-apps-jhipster/table-of-contents" +* "22-Jan-2019" - "Developing Microservices and Mobile Apps with JHipster - Security" - In Progress - "https://app.pluralsight.com/library/courses/play-by-play-developing-microservices-mobile-apps-jhipster/table-of-contents" +* "21-Jan-2019" - "Developing Microservices and Mobile Apps with JHipster - Security" - In Progress - "https://app.pluralsight.com/library/courses/play-by-play-developing-microservices-mobile-apps-jhipster/table-of-contents" +* "20-Jan-2019" - "Developing Microservices and Mobile Apps with JHipster" - In Progress - "https://app.pluralsight.com/library/courses/play-by-play-developing-microservices-mobile-apps-jhipster/table-of-contents" +* "20-Jan-2019" - "AdvancedDesignPatternforNoSQL" - Completed - "https://www.trek10.com/blog/dynamodb-single-table-relational-modeling/" +* "19-Jan-2019" - "Lecture 16 logstash job Kibana visualization" - Completed - "https://www.youtube.com/watch?v=imrKm6dV3NQ&t=1s" +* "18-Jan-2019" - "Use Logstash to load CSV into Elasticsearch" - Completed - "https://www.youtube.com/watch?v=rKy4sFbIZ3U" +* 
"17-Jan-2019" - PluralSight - "Centralized Logging with ElasticSearch Stack" - InProgress - https://app.pluralsight.com/library/courses/centralized-logging-elastic-stack/table-of-contents +* "16-Jan-2019" - PluralSight - "Build pipeline - Completed" - Completed - https://app.pluralsight.com/player?course=jenkins-2-getting-started + * Multiagen skipped, 15th was break in the project +* "14-Jan-2019" - PluralSight - "Build pipeline - NodeJs project" - InProgress - https://app.pluralsight.com/player?course=jenkins-2-getting-started +* "13-Jan-2019" - PluralSight - "Build pipeline - VirutalBox build-failure notitication" - InProgress - https://app.pluralsight.com/player?course=jenkins-2-getting-started +* "12-Jan-2019" - PluralSight - "Build pipeline - MailHog" - InProgress - https://app.pluralsight.com/player?course=jenkins-2-getting-started +* "11-Jan-2019" - PluralSight - "Build pipeline" - InProgress - https://app.pluralsight.com/player?course=jenkins-2-getting-started +* "10-Jan-2019" - PluralSight - "Testing and Continuous Integration" - InProgress - https://app.pluralsight.com/player?course=jenkins-2-getting-started +* "10-Jan-2019" - PluralSight - "Testing and Continuous Integration" - InProgress - https://app.pluralsight.com/player?course=jenkins-2-getting-started +* "09-Jan-2019" - PluralSight - "Creating application builds" - InProgress - https://app.pluralsight.com/player?course=jenkins-2-getting-started +* "08-Jan-2019" - PluralSight - "Getting Started with Jenkins 2" - InProgress - https://app.pluralsight.com/player?course=jenkins-2-getting-started +* "07-Jan-2019" - PluralSight - "Building Web Applications with Node.js and Express 4.0 (UPDATE)" - InProgress - https://app.pluralsight.com/library/courses/nodejs-express-web-applications-update (Module 8) (GoodReads Service) +* "06-Jan-2019" - PluralSight - "Building Web Applications with Node.js and Express 4.0 (UPDATE)" - InProgress - https://app.pluralsight.com/library/courses/nodejs-express-web-applications-update (Module 7) (MongoDb) +* "05-Jan-2019" - PluralSight - "Building Web Applications with Node.js and Express 4.0 (UPDATE)" - InProgress - https://app.pluralsight.com/library/courses/nodejs-express-web-applications-update (Module 7) (MySQL) +* "04-Jan-2019" - PluralSight - "Building Web Applications with Node.js and Express 4.0 (UPDATE)" - InProgress - https://app.pluralsight.com/library/courses/nodejs-express-web-applications-update (Module 6) +* "03-Jan-2019" - PluralSight - "Building Web Applications with Node.js and Express 4.0 (UPDATE)" - InProgress - https://app.pluralsight.com/library/courses/nodejs-express-web-applications-update (Module 5) +* "02-Jan-2019" - PluralSight - "Building Web Applications with Node.js and Express 4.0 (UPDATE)" - InProgress - https://app.pluralsight.com/library/courses/nodejs-express-web-applications-update (Module 1 to Module 4) +* "01-Jan-2019" - PluralSight - "Node.js: Introduction to Node.js, Events, and Streams" - Completed + + + +## In Queue +* JHipster +* Spring Security +* [Udemy Kubernetes course](https://www.udemy.com/learn-devops-the-complete-kubernetes-course/?couponCode=KUBERNETES_GITHUB) +* Liquibase +* Kafka +* Python +* Serverless + +## In Queue (Financial) +* Cassandra data modellling + + +## Follow-up/recommended part of courses +* https://github.com/substack/stream-handbook +* DynamoDB Best practices + +# Delayed +* Virtual box - network - able to work with Nat + +# Completed +* Jenkins course +* 
[https://github.com/mohanmca/MohanLearningGround/blob/master/src/main/md/Aws/AdvancedDesignPatternforNoSQL.md](https://www.trek10.com/blog/dynamodb-single-table-relational-modeling/) + diff --git a/src/main/md/MachineLearning/MLTrend.md b/src/main/md/MachineLearning/MLTrend.md new file mode 100644 index 00000000..d1e2eb93 --- /dev/null +++ b/src/main/md/MachineLearning/MLTrend.md @@ -0,0 +1 @@ +* GAN - Generative Adversial Network \ No newline at end of file diff --git a/src/main/md/MachineLearning/ML_Competition.md b/src/main/md/MachineLearning/ML_Competition.md new file mode 100644 index 00000000..f9b2532a --- /dev/null +++ b/src/main/md/MachineLearning/ML_Competition.md @@ -0,0 +1,3 @@ +* Ridge regression model + * using l2 regularized model "models_ridge.py" + \ No newline at end of file diff --git a/src/main/md/MachineLearning/ML_Courses.md b/src/main/md/MachineLearning/ML_Courses.md new file mode 100644 index 00000000..3332587d --- /dev/null +++ b/src/main/md/MachineLearning/ML_Courses.md @@ -0,0 +1,3 @@ +## ML Courses +* https://developers.google.com/machine-learning/crash-course/ml-intro +* https://course.fast.ai/ \ No newline at end of file diff --git a/src/main/md/MachineLearning/ML_preparation.md b/src/main/md/MachineLearning/ML_preparation.md new file mode 100644 index 00000000..b242a632 --- /dev/null +++ b/src/main/md/MachineLearning/ML_preparation.md @@ -0,0 +1,28 @@ +* Feature engineering is often the most impactful thing you can do to improve quality of models and a place where I often see beginners (and experts for that matter) get stuck. +* Feature engineering takes up about 75% of the time +* Good ML tools live or die with good feature engineering +* There are various feature engineering and feature extraction techniques. Filter methods, wrapper methods, and embedded methods. Principle component analysis, autoencoding, variance analysis, linear discriminant analysis, Gini index, genetic algorithms, etc -- the feature selection process will depend on the dataset, the problem domain, the analysis algorithm you ultimately use, etc. + + + +* How did you figure out what algorithm(s) are appropriate? + * It depends on the problem domain. + * Discrete or continuous data? + * Categorical features, numeric features, features as bitmasks. + * Do you need a probabilistic outcome? Etc. + * Generally you start with the easiest algorithms in your toolbox to see how viable they are. + * For a classification task I'll almost always start with a naive Bayes classifier (if the data allows) and/or a random forest and see how they perform. + * If the problem domain is highly non-linear you might start with a support vector or kernel method. + * Neural network could be a last resort, asmost classification problems can be solved to a high accuracy much more simply. + * Solution steps + * What's the distribution of each dimension? + * Are the relationships linear, nonlinear, clustered, dispersed, logarithmic, etc. + +## How to Practice? +* Find a dataset +* Choose a predictor +* filter, clean and massage your data till you get better metrics/understanding. +* Rinse, repeat on many different datasets and problems, and you'll know how to do this. 
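+
+A minimal sketch of that practice loop, assuming scikit-learn is available; the bundled wine dataset, the random-forest predictor, and the metric are arbitrary example choices, not part of the original notes:
+
+```python
+from sklearn.datasets import load_wine
+from sklearn.ensemble import RandomForestClassifier
+from sklearn.metrics import classification_report
+from sklearn.model_selection import train_test_split
+
+# 1. Find a dataset (here: scikit-learn's bundled wine data)
+X, y = load_wine(return_X_y=True)
+X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
+
+# 2. Choose a predictor and fit it
+model = RandomForestClassifier(n_estimators=100, random_state=0)
+model.fit(X_train, y_train)
+
+# 3. Look at the metrics, then go back and filter/clean/massage the features
+print(classification_report(y_test, model.predict(X_test)))
+```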
+ +## Reference +* [Machine Learning Crash Course](https://news.ycombinator.com/item?id=16493489) \ No newline at end of file diff --git a/src/main/md/MachineLearning/ML_todo.md b/src/main/md/MachineLearning/ML_todo.md new file mode 100644 index 00000000..61ddbf93 --- /dev/null +++ b/src/main/md/MachineLearning/ML_todo.md @@ -0,0 +1,2 @@ +* https://blog.floydhub.com/gentle-introduction-to-text-summarization-in-machine-learning/ +# [Logistic Regression from Bayes' Theorem and Linear regression](https://www.countbayesie.com/blog/2019/6/12/logistic-regression-from-bayes-theorem) diff --git a/src/main/md/MachineLearning/MachineLearning.md b/src/main/md/MachineLearning/MachineLearning.md new file mode 100644 index 00000000..127b228f --- /dev/null +++ b/src/main/md/MachineLearning/MachineLearning.md @@ -0,0 +1,152 @@ +## Getting Started with Machine Learning in Python - Rudy Lai + +## Topics +* Environment to create ML models +* Prepare your datasets for ML with data cleaning +* Putting data into the right categories (classification - Not continious) +* Regression is opposite of classification (Predication - Continious) +* UnSupervised Learning: Segmenting Groups and Detecting Outliers +* Modelling complex relationships with Nonlinear models + +## Rule based vs ML +* Rule based functions are table driven, developed by developer +* ML functions are complex, they are not defined by developer +* Machine derives function itself based on model and data +* How helicopter should fly can't be defined in single function +* ML = data + model + feedback_loop +* ML models are simplification are real-life +* feedback_loop is feeding the error back to system as a feedback +* Without data, there is no ML + +## Tasks +* ML is all about predicting things +* Regression is the task of predicting numbers +* Classification is the task of predicting labels + +## Why python +* Lots of modules related to data +* Interpretted - Feedback loop is faster in python due to REPL +* Large amount of ML modules are available + * Pandas - Ability to manage tabular data + * Scikit-Learn + * Jupyter Notebook - Combine normal document with Python code +* 1000 data per class should be present + +## Standardization and Normalization +* Machine Learning would work well for normally distributed data +* Mean of zero and standard deviation of 1 - should be there +* We can normalize data by converting into different units +* Kernel of SVM and L1 & L2 regularizer requires data to be normally distributed + +## Scklearn Toy Datasets +* load_boston([return_X_y]) Load and return the boston house-prices dataset (regression). +* load_iris([return_X_y]) Load and return the iris dataset (classification). +* load_diabetes([return_X_y]) Load and return the diabetes dataset (regression). +* load_digits([n_class, return_X_y]) Load and return the digits dataset (classification). +* load_linnerud([return_X_y]) Load and return the linnerud dataset (multivariate regression). +* load_wine([return_X_y]) Load and return the wine dataset (classification). +* load_breast_cancer([return_X_y]) Load and return the breast cancer wisconsin dataset (classification). + +## Supervised Learning +## UnSupervised Learning +## Reinforcement Learning + + +## Seaborn and matplotlib +* Seaborn is a library for making statistical graphics in Python. It is built on top of matplotlib and closely integrated with pandas data structures. 
+* Matplotlib is a Python 2D plotting library which produces publication quality figures in a variety of hardcopy formats and interactive environments across platforms. Matplotlib can be used in Python scripts, the Python and IPython shells, the Jupyter notebook, web application servers, and four graphical user interface toolkits. + +## False Positive, False Negative +||*Male*|*Female*| +|-|-|-| +|**Male**|True Positive|False Positive| +|**Female**|False Negative|True Negative| +*X-axis - Gold Standard*/*Y-axis - Algorithm Classification* +* precision = tp / (tp + fp) +* recall = tp / (tp + fn) + * The precision is intuitively the ability of the classifier not to label as positive a sample that is negative. + * The recall is intuitively the ability of the classifier to find all the positive samples. + * F-beta score can be interpreted as a weighted harmonic mean of the precision and recall, where an F-beta score reaches its best value at 1 and worst score at 0. + +* Logistic Function or Sigmoid Function +* SigmoidFunction: x -> [0.0-1.0] (Converts any x into 0.0 to 1.0) +* Why we need them? Linear Function should be converted into probability +* Logistic regression + * Linear model + sigmoid function +* Linear regression + * Linear regression = simplest linear model (additive model for all features) + * Effect of features are linear on target use linear regression + * Linear regression = logistic regression - sigmoid function +* Least Square + * Why we sqaure? + * Smaller errors are amplified + * Bigger errors are extremly amplified + * All errros become +ve + +# UnSupervised learning +* Segmenting Groups and Detecting Outliers +* Finding groups automatically with k-means clustering +* Reducing the number of variables in our data with PCA +* Smooting out our histograms with kernel density estimation +* When there is no answer like Y/y, better to use UnSupervised learning. +* We can use UnSupervised learning as input into supervised learning +* Dimensionality reduction + * Vector, Metrics (2 dimension), Tesnor (3 dimension, time could be one dimension) + * We can compress more than one dimension into one dimension without loosing any details using dimension reduction + +# What is PCA? 
+* Principal component analysis +* PCA does dimensionality reduction by isoloating components of data +* Especialy components are linear combinations of features +* Finds the principal component +* Removes noise or tail are keeps the core + +# What is kernel density estimation +* It is density of data points within particular set of values +* In Kernal density estimation, we use kernel to do density estimation +* If there is no + +## Non linear models +* Features and target is not linearly mapped +* Signals could be polynomial, cubic or quadradic relationship between features and target +* Explainable models - Decision Trees +* Automatic feature engieering - Support Vector Machine +* Dealing with non-linear relationship with polynomial regression +* Reducing the number of learned rules with regularization + +## Decision Trees +* Allows to stack rules + * Example rules for IRIS data + * If sepal length is less than 3, ignore and use cepal length alone + * If sepal length is grether than 3, use and cepal length + +## Support Vector Machine - SVM +* Vector is mathematical way of saying a line (could be curved) +* Find data-point that supports Vector +* Further to SVM is finding Kernel + * Kernel would make SVM (curved line) + * Kernel Breaks the linearity of classifier +* SVM score higher the better +* How good model explains the data is the score +* SVM can use the kernel trick to find the best transformation of data points of multiple non-linear features and find decision based on it + + +# Polynomial +* Squared feature/Cubic Feature/Quardratic +* Order 5, 6 or higer + + +# Regularization +* When model doesn't require to learn too many co-efficient in one go +* When model should be avoided with complex co-efficient +* It is to avoid over-fit the data +* L1, L2 and alpha +* Avoid noise +* Lasso is L1 regularization, try to make features as zeros + + + +## References +* [A visual introduction to machine learning](http://www.r2d3.us/visual-intro-to-machine-learning-part-1/) +* [What is standarscaler](http://benalexkeen.com/feature-scaling-with-scikit-learn/) +* [Euclidean norm or Square Norm](https://en.wikipedia.org/wiki/Norm_(mathematics)#Euclidean_norm) diff --git a/src/main/md/Management/Leadership_and_rulers.md b/src/main/md/Management/Leadership_and_rulers.md new file mode 100644 index 00000000..7893baca --- /dev/null +++ b/src/main/md/Management/Leadership_and_rulers.md @@ -0,0 +1 @@ +* [The Rules for Rulers](https://www.youtube.com/watch?v=rStL7niR7gs) \ No newline at end of file diff --git a/src/main/md/Management/MangeUp.md b/src/main/md/Management/MangeUp.md new file mode 100644 index 00000000..c94d90a7 --- /dev/null +++ b/src/main/md/Management/MangeUp.md @@ -0,0 +1,3 @@ +* [Design Patterns for Managing Up](https://queue.acm.org/detail.cfm?id=3308563) +* [The Gervais Principle, Or The Office According to “The Office”](https://www.ribbonfarm.com/2009/10/07/the-gervais-principle-or-the-office-according-to-the-office/) +* [Taking a Fence Down](https://www.chesterton.org/taking-a-fence-down/) \ No newline at end of file diff --git a/src/main/md/Math/FinMath/FinancialMathematics.md b/src/main/md/Math/FinMath/FinancialMathematics.md new file mode 100644 index 00000000..f342c8f2 --- /dev/null +++ b/src/main/md/Math/FinMath/FinancialMathematics.md @@ -0,0 +1,20 @@ + +# Financial Mathematics by Giuseppe Campolieti; Roman N. 
Makarov +* https://www.wlu.ca/academics/faculties/faculty-of-science/faculty-profiles/giuseppe-joe-campolieti/index.html +* https://www.wlu.ca/academics/faculties/faculty-of-science/faculty-profiles/roman-makarov/index.html +* https://www.wlu.ca/programs/science/undergraduate/financial-mathematics-ba/index.html +* Fundamental subjects + * calculus + * analysis + * linear algebra + * differential equations + * numerical analysis + * optimization + * probability theory and statistics + * stochastic processes + * stochastic calculus +* This book also provides a self-contained introduction to stochastic calculus and martingale theory, which are important cornerstones in quantitative finance. + +# Reference +* [Topics in Mathematics with Applications in Finance-Video](https://www.youtube.com/playlist?list=PLUl4u3cNGP63ctJIEC1UnZ0btsphnnoHR) +* [Topics in Mathematics with Applications in Finance](https://ocw.mit.edu/courses/mathematics/18-s096-topics-in-mathematics-with-applications-in-finance-fall-2013/) \ No newline at end of file diff --git a/src/main/md/Math/GameTheory.md b/src/main/md/Math/GameTheory.md new file mode 100644 index 00000000..d39431cc --- /dev/null +++ b/src/main/md/Math/GameTheory.md @@ -0,0 +1,115 @@ +* Game Theory + * Person's action depends on what others do or expected to do + * How people act when they are interconnected + * Strategic interaction + * Coperation vs Conflict + * Our action impacts others, and other's action impacts ours + * FT - Number guess game + * Reader picks a number zero and 100, winner is a contestant with a number closest to 2/3 of the average of all numbers entered in the contest + * Guessing game proves that there would be bubble even if everyone is rational + * Application + * Politics - Policy of rival party + * Economics - Pricing of rival firm, and competition, competitors product + * Biology - fighting for scarce resource aggresively, but not with powerful one +* Application + * Simultaneous moves + * Strategic interactions + * Payoff matrix +* Payoff + * Maximize happiness + * Maximize harm (competition) +* Nash equilibrium + * Each player does the best response possible to the choice of the other player + * Rational expectations + * Prisoner's dillema + * car theft and Hit and Run + * 1 x 1, 10 x 10 (both silent, both confess) + * 0 x 15, 15 x 0 (a confess and b guilty, b confesses and a guilty) + * For above pay off, both prisonars confesses + * Pareto efficiency +* Pareto efficiency + * Pareto efficiency couldn't be trusted when nuclear arm race happens between super power + * If nuclear weapon will not be used, better without producing them, but requires to dominant power +* Co-operative nash equilibrium + * Ash and Beth are room mate, both are not ready to doing dishes, both wants other to do the dish (do/clean dish) + * Pay off + * A:10, B:10, A:20, B: 08 (Neither does, Alice does the dish) + * A:08, B:20, A:14, B: 14 (Beth does, Both does) + * Here equilibrium is both does the dish +* Multiple nash equilibrium + * Amy and Bob like to *stay together*, in an event. + * (A:5*B:10, A:10*B:5) - Foot ball vs Dance + * For them staying together is more important. + * Assume they can't contact each other, If they need to make simultaneous move, there are two possibilities. 
+ * Each would think others convenient and yield for others interest, It might leads to co-ordination failure + * When multiple-equilibrium, to avoid co-ordination failure, we can choose to use "Soical norm" or co-ordination device + * Every time when co-ordiantion failure occurs between Bob & Amy, They choose Bob way, that is norm for them. It avoids co-ordination failure. + * Or rely on first advertisement on radio between 4~5 (Co-ordination device) + + +* Focual point effect + * Culture and history can affect our rational behaviour + * Co-ordination device + * self-fulfilling expectations - Expectations which induce people to take actions which bring about the situation that is expected. + * When everyone tries to withadraw all of their money from bank, bank run would happen. They would run out of money + * Positive statements or actions by bankers or policymakers can backfire if people take them as sign of weakness +* Mixed-stratgey nash equilibrium + * rock, paper and scissors + * "Black Wednesday" - "16/09/1992" - pound collapsed + * George soros made billions + * Speculators randomize the timing of the attack (short-sell), if central bank tried to buy mark, it gave clue to short-sellers +* Expected profit and war of attrition + * If two store operating in the same smallville, only one has to survive, who stays long would wins + * if the expected profit between two option, one option is higher business would do that, if not, they can choose any of the two +* Evolutionary biology + * Hawk vs Dove + * Individual success and group failure + * Largest fish keep growing, but it has to fight with similar large fish, and evoultion would destry weakest among them and stronger would survive + * Two agressive lion confronts and creates damage among them + * Dove type vs Hawk Type (Hawk type always wins) + * Mathematically the two problems, the rational decision makers and and the problem of genetically conditioned animals subject to evoultionary forces are identical +* Sequential move games + * An entrepreneur opening a coffee shop in a corner place can observe which other shops are already there, and will consider which other may come + * Are there first mover advantage? + * Subgame-perfect Nash equilibrium + * Game tree expected payoff + * time inconsistency problem - In economics, dynamic inconsistency or time inconsistency is a situation in which a decision-maker's preferences change over time in such a way that a preference can become inconsistent at another point in time. + * My boss was accused of time inconsistency when he initially told us he wanted us to purchase our competitor, but then backtracked when the sale was about to go through. + * Commitment device - Collect collateral to elliminate time inconsistencies (Bank collects collateral, so that entrepreneur invests in safe project, and doesn't invest in risky projects) + * Commitment device doesn't work for poor - Grameen bank, microcredit for poor, Muhammed Yunus + * In nuclear war, how to enforce retaliation + * One leader initiate + * Delegate to multiple leaders to initiate retaliation + * Automate retaliation + * Doomsday device would automatically retaliate, but its purpose is not-to-initiate initial strike by opponent, The day we use, it alerady failed its purpose +* Incomplete information + * Asymmetric information + * Insurance companies work with asymmetric information. 
Car insurer knows how he drives, not insurance company + * Persistent employment + * hidden action - Most poor people earn more than minimum wage when they are working; their problem is not low wages. The problem comes when they are not working. - Joseph Stiglitz + * Warranties are trying to remove asymmetric information about quality before trying + * High level of advertising only be profitable for companies with high quality product + * Low quality with advertising - no repeat customers. + * High quality with advertising - repeat customers. + * Religious rituals are there to ensure "people are true believers" not pretending to get short term benefit + * Even team players are rational, team could be irrational + * Rational decision makers have transitive preference (A>B, B>C, i.e, A >C) + * for team even if (A>B, B>C), still A < C, because team is irrational and non-transitive + * Arrow%27s_impossibility_theorem - Without dictator group would always make bad choice + * If citi council has to choose one among three (Park > Recycling > School), They would choose wrong, one despire everyone independently vote something irrational + * Attempts to form social judgement by aggregating individual expressed preference always lead to the possibility of paradox + * In battle, if a soldier thinks they would win, and he assumes his actions are not matter, and fellow soldier believe they are going to loose... in-coperative army would loose + * + + +# Reference +* Thaler's Financial Times experiment (Guess number) +* [Keyness beauty contest](https://www.ft.com/content/6149527a-25b8-11e5-bd83-71cb60e8f08c) +* https://en.wikipedia.org/wiki/Richard_Thaler +* [Pareto efficiency](https://www.richmondfed.org/-/media/richmondfedorg/publications/research/econ_focus/2007/winter/pdf/jargon_alert.pdf) +* Rock Scissor Game +* [Football vs opera](https://en.wikipedia.org/wiki/Battle_of_the_sexes_(game_theory)) +* A Theory of Exit in Duopoly +* Tax evasion - mixed nash equilibrium +* https://en.wikipedia.org/wiki/Arrow%27s_impossibility_theorem +* "The game theory of applied economists" alias "The primer in Game theory" Robert Gibbon \ No newline at end of file diff --git a/src/main/md/Math/Kavin/fractions.html b/src/main/md/Math/Kavin/fractions.html new file mode 100644 index 00000000..f85b6a00 --- /dev/null +++ b/src/main/md/Math/Kavin/fractions.html @@ -0,0 +1,103 @@ + + + + + +
+ + +Your browser does not support the HTML5 canvas tag. + + + + + diff --git a/src/main/md/Math/MathTricks/9_ticks_and_techniques.md b/src/main/md/Math/MathTricks/9_ticks_and_techniques.md new file mode 100644 index 00000000..dd102fc6 --- /dev/null +++ b/src/main/md/Math/MathTricks/9_ticks_and_techniques.md @@ -0,0 +1,33 @@ +### Is 3456 divisible by 9? + +* Yes, It is divisible by 3, 9 and 18 +* 3+4+5+6 = 18 - 9. Since 18 is divisble by 9 and 2, it is divisble by 18 + +--- +### Is 7832 divisible by 9? + +* No +* 7+8+3+2 = 20. It is not divible by 9 +--- +### What is the nearest number for 7832 that divisible by 9? + +* Remove all number that together forms 9 (7 and 2) +* Add remaining number - 8+3 = 11 +* Subtract them from 9. (11-9 = 2) +* Subtract that two from original 7832. 7830. +* Hence 7830 is divisble by 9 = 870 times 9 is = 7830 +--- +### How to find nearest number divisible by 9 (Example: 8765)? + +* Add each number with next number, when digit is bigger than 9, subtract 9 from it +* 8+7+6+5 = 6+6+5 = 3 + 5 = 8 +* Subtract 8 from the original = 8757 +--- +### 9 magic for two digit number + +* 10x+y - (x+y) = 9x + * 79 - (7+9) = 9*7 +* 10x+y - ((x+y)%9) = 9(x+1) + * 79 - ((7+9)%9) = 9*8 + * 79 - 7 = 9*8 + diff --git a/src/main/md/Math/MathUsingDiagrams_PhaseSpace.md b/src/main/md/Math/MathUsingDiagrams_PhaseSpace.md new file mode 100644 index 00000000..3eac7037 --- /dev/null +++ b/src/main/md/Math/MathUsingDiagrams_PhaseSpace.md @@ -0,0 +1 @@ +*[Phase Space Diagrams](https://www.acs.psu.edu/drussell/Demos/phase-diagram/phase-diagram.html) diff --git a/src/main/md/Math/Math_Common_MSC_Syllabus.md b/src/main/md/Math/Math_Common_MSC_Syllabus.md new file mode 100644 index 00000000..8089139d --- /dev/null +++ b/src/main/md/Math/Math_Common_MSC_Syllabus.md @@ -0,0 +1,28 @@ +# Common between Ignou and MU +* Probability and Statistics +* Differential Equations and Numerical Solutions +* Functional Analysis +* Algebra +* Real Analysis +* Complex Analysis + +# Unique +* Mechanics +* Topology +* Differential Geometry and Tensor Analysis +* Linear Algebra +* Mathematical Modelling + +* Indira Gandhi National Open University - M.Sc. (MATHEMATICS WITH APPLICATIONS IN COMPUTER SCIENCE) +* Madas University - M.Sc + +# Books +* Complex Variables and Applications by J. W. Brown and R. V. Churchill +* Functional Analysis by B. V. Limaye (New Age International (P) Ltd., 2nd Edition). +* I.N. Herstein. Topics in Algebra (II Edition) Wiley Eastern Limited, New Delhi, 1975. 
+ +# Reference +* http://www.ignouonline.ac.in/gyandarshan/ +* https://easthub.wordpress.com/2009/09/20/mathematics-university-links/ +* https://easthub.wordpress.com/2009/09/19/madras-university-m-sc-mathematics-portion/ +* https://easthub.wordpress.com/2009/09/19/differential-equation-videos/ diff --git a/src/main/md/Math/MathsToLearn.md b/src/main/md/Math/MathsToLearn.md new file mode 100644 index 00000000..43fd7a28 --- /dev/null +++ b/src/main/md/Math/MathsToLearn.md @@ -0,0 +1,3 @@ +## Interactive Maths +* http://students.brown.edu/seeing-theory/index.html +* http://immersivemath.com/ila/ch05_gausselim/ch05.html \ No newline at end of file diff --git a/src/main/md/Math/NaiveBayesConditionalProbability.md b/src/main/md/Math/NaiveBayesConditionalProbability.md new file mode 100644 index 00000000..c7f0e47e --- /dev/null +++ b/src/main/md/Math/NaiveBayesConditionalProbability.md @@ -0,0 +1,80 @@ +* If a and b are the probabilities associated with two independent pieces of evidence, then combined they indicate a probability of: + +``` + ab +------------------- + ab + (1 - a)(1 - b) + + abc +--------------------------- +abc + (1 - a)(1 - b)(1 - c) +``` +* 7 feet tall indicates with 60% probability that someone is a basketball player, and carrying a basketball indicates this with 72% probability. If you see someone who is over 7 feet tall and carrying a basketball, what is the probability that they're a basketball player? + + * +``` + (.60)(.72) + ------------------------------- = 0.794 = 79.5% + (.60)(.72) + (1 - .60)(1 - .72) +``` + +* Reference (http://mathforum.org/library/drmath/view/62710.html) +```pre +In a box there are nine fair coins and one two-headed coin. One coin +is chosen at random and tossed twice. Given that heads show both +times, what is the probability that the coin is the two-headed one? +What if it comes up heads for three tosses in a row? + +I understand that there are 10 coins in total. My teammates tried it +out also and they got 4/9 + 4 for the first part and 8/9 + 8 for the +second part. I don't understand how they got this. + +Here's a way to think about it. Make a tree: + + flip two heads (1/4) + / + choose fair coin (9/10) + / \flip anything else (3/4) +10 coins + \ + choose two-headed coin (1/10) -> flip 2 heads (1/1) + + +Study this tree, and it becomes clear that there are 3 possibilities: + +1 - the top one has probability (9/10)*(1/4) = 9/40 +2 - the next one has probability (9/10)*(3/4) = 27/40 +3 - the last one has probability (1/10)*1 = 1/10 + +Before you did the experiment, these were all the possibilities there +were. Then you did the experiment. What did it tell you? It told you +that the middle option is out. The coin did NOT show a tail, so we +know it wasn't the second outcome. + +This narrows our universe to the 9/40 and the 1/10. The trick now is +to re-normalize these probabilities so that they show a total +probability of 1, but stay in the same ratio. Within that universe +(all the possibilities that are left) lines (1) and (3) remain in the +ratio 9:4. So the probability of the top one is 9/13 and the bottom +is 4/13, where 13 is just the sum of 9 and 4. + +Can you extend this reasoning to come up with the corresponding result +for three flips? + +(This kind of reasoning is called Bayesian probability, and it is one +of the most confusing topics in probability at any level of study.) 
+``` + +# Gregory's Theorem +* https://divisbyzero.com/2018/09/28/proof-without-word-gregorys-theorem/ + +# References +* http://www.paulgraham.com/naivebayes.html +* https://en.wikipedia.org/wiki/Naive_Bayes_spam_filtering + +# Youtube math tricks +* https://www.youtube.com/watch?v=Rgw9Ik5ZGaY +* Channel name - tecmath + + +# [Logistic Regression from Bayes' Theorem and Linear regression](https://www.countbayesie.com/blog/2019/6/12/logistic-regression-from-bayes-theorem) \ No newline at end of file diff --git a/src/main/md/Math/Numbers.md b/src/main/md/Math/Numbers.md new file mode 100644 index 00000000..4fa4e9d5 --- /dev/null +++ b/src/main/md/Math/Numbers.md @@ -0,0 +1,4 @@ +* https://en.wikipedia.org/wiki/List_of_recreational_number_theory_topics +* https://en.wikipedia.org/wiki/Harshad_number +* https://en.wikipedia.org/wiki/Happy_number +* \ No newline at end of file diff --git a/src/main/md/Math/ProbabilisticProgramming.md b/src/main/md/Math/ProbabilisticProgramming.md new file mode 100644 index 00000000..6b62ebf6 --- /dev/null +++ b/src/main/md/Math/ProbabilisticProgramming.md @@ -0,0 +1,9 @@ +* Probablistic programming + * [WebPPL Tutorial: Probabilistic Programming](https://www.youtube.com/watch?v=9SEIYh5BCjc) + * [MIT 6.041SC Probabilistic Systems Analysis and Applied Probability, Fall 2013](https://www.youtube.com/playlist?list=PLUl4u3cNGP60A3XMwZ5sep719_nh95qOe) + * [Tobias Gerstenberg - A counterfactual simulation model of causal judgment](https://www.youtube.com/watch?v=IkbEIKQqoyA) + * [Probabilistic Programming in the Real World - Zach Anglin](https://www.youtube.com/watch?v=5f-9xCuyZh4) +* PyMC + * [Markov chain Monte Carlo (MCMC)/Hamiltonian Monte Carlo](https://docs.pymc.io/notebooks/getting_started) + +* [MCMC](https://ermongroup.github.io/cs228-notes/inference/sampling/) diff --git a/src/main/md/Math/Probability.md b/src/main/md/Math/Probability.md new file mode 100644 index 00000000..c672b5a6 --- /dev/null +++ b/src/main/md/Math/Probability.md @@ -0,0 +1,77 @@ +* Probability is not always intuitive +* Probability is a mathematical description of randomness and uncertainty. It is a way to measure or quantify uncertainty. +* Probability is the underlying foundation for the methods of statistical inference. Probability can be used to quantify how much we expect random samples (collected as part of statistics) to vary. +* Probability can answer the question like "How likely is it that our sample estimate is no more than 3% from the true percentage of all U.S. adults who are in favor of the death penalty?” + +* Volunteer sample - is biased + * Determine the musical preferences of all students at your university, and assuming it is entire population + * We cannot generalize to any larger group at all. + * Volunteer samples tend to be comprised of individuals who have a particularly strong opinion about an issue, and are looking for an opportunity to voice it. +* Convenience sample - is biased + * Stand outside the Student Union, across from the Fine Arts Building, and ask students passing by to respond to your question about musical preference. + * Checking only music students is not entire population +* Sampling frame - biased + * Ask your professors for email rosters of all the students in your classes. Randomly sample some addresses, and email those students with your question about musical preference. + * list of potential individuals to be sampled—does not match the population of interest. 
+* Systematic sampling - Unbiased - Not subject to any bias + * Obtain a student directory with email addresses of all the university's students, and send the music poll to every 50th name on the list. + * If individuals are sampled completely at random, and without replacement, then each group of a given size is just as likely to be selected as all the other groups of that size. This is called a simple random sample (SRS). + +* Probability sampling plan (or technique) + * Simple Random Sampling + * Cluster Sampling + * Suppose that the city has 10 hospitals. Choose one of the 10 hospitals at random and interview all the nurses in that hospital regarding their job satisfaction. This is an example of cluster sampling, in which the hospitals are the clusters. + * Stratified Sampling + * Choose a random sample of 50 nurses from each of the 10 hospitals and interview these 50 * 10 = 500 regarding their job satisfaction. This is an example of stratified sampling, in which each hospital is a stratum. + +* Law of large numbers - The relative frequency of an event does indeed approach the theoretical probability of that event as the number of repetitions increases. This is called the Law of Large Numbers. +* The Law of Large Numbers states that as the number of trials increases, the relative frequency becomes the actual probability. So, using this law, as the number of trials increases, the empirical probability gets closer and closer to the theoretical probability. +* "How many times do I need to repeat the random experiment in order for the relative frequency to be, say, within .001 of the actual probability of the event?" +* Relative Frequency - (definition) The probability of an event (A) is the relative frequency with which the event occurs in a long series of trials. + +* For a "fair" coin (one that is not unevenly weighted, and does not have identical images on both sides) +* Aleatory vs Epistemic (errors) +* Determine probability: Theoretical (Classical) and Empirical (Observational). + + +## Discrete and Continious +* For example, the variable “number of times a college student changes major” is a discrete random variable. The (exact) weight of a person is a continuous random variable. +* Probability distribution = Probability model +* The outcomes described by the model are random. This means that individual outcomes are uncertain, but there is a regular, predictable distribution of outcomes in a large number of repetitions. + +## Stadard deviation +* Number that describes how much frequenceis could stay away from actual means. +* Higher the frequency, data also available far from mean + +##Histogram +* for Probability - The heights of all the rectangles in the histogram must sum to 1. This meant that the area was also 1. +* As the number of intervals increases, the width of the bars becomes narrower and narrower, and the graph approaches a smooth curve and looks like normal curve. + +# Probability density curve. +* Probability distribution of a continuous random variable is represented by a probability density curve. 
+* The area under a probability density curve is 1
+* P(X<9) = P(X≤9), since P(X=x) = 0 for a continuous random variable
+* P(a < X < b) = the integral of f(x) from a to b
+* Sample distribution
+  * Variables such as pregnancy lengths, shoe sizes, foot lengths, and other human physical characteristics exhibit these properties: symmetry indicates that the variable is just as likely to take a value a certain distance below its mean as it is to take a value that same distance above its mean
+* Bell shaped - values closer to the mean are most likely
+* Normal curve ~ common curve ~ natural curve
+* mean (μ) and standard deviation (σ)
+* Standard Deviation Rule (or the 68-95-99.7 rule)
+* A normal random variable follows the normal distribution (it is obviously a continuous variable)
+* Quartiles Q1 and Q3
+  * z of about -0.67 and +0.67 (the middle 50%)
+* Lowest and highest 10%: z of about -1.28 and +1.28
+
+# Standard normal variable
+* z-score = (x−μ)/σ, i.e., (value - mean)/standard deviation
+* z-scores allow us to compare values of different normal random variables
+* The normal table provides probabilities that a standardized normal random variable Z would take a value less than or equal to a particular value z*.
+* Quartiles Q1 and Q3
+  * z of about -0.67 and +0.67 (the middle 50%)
+
+
+# Some samples
+* Length (in days) of human pregnancies is a normal random variable (X) with mean 266, standard deviation 16.
+  * 266 ± 16 = 250~282 (1 SD), 266 ± 32 = 234~298 (2 SD), 266 ± 48 = 218~314 (3 SD)
+  * May-15~Feb-4-Result: 265 days
\ No newline at end of file
diff --git a/src/main/md/Math/Probability_Current_Affairs.md b/src/main/md/Math/Probability_Current_Affairs.md
new file mode 100644
index 00000000..ec6e92b3
--- /dev/null
+++ b/src/main/md/Math/Probability_Current_Affairs.md
@@ -0,0 +1,4 @@
+* [Guess the number 33](https://www.pinnacle.com/en/betting-articles/Pinnacle-Puzzle/guess-the-number-puzzle/WLZ2XXJEKBB5JZBG)
+* [Nate Silver vs. Nassim Taleb](https://towardsdatascience.com/why-you-should-care-about-the-nate-silver-vs-nassim-taleb-twitter-war-a581dce1f5fc)
+
+* [Matrix DS](https://matrixds.com/)
\ No newline at end of file
diff --git a/src/main/md/Math/Probability_Statistics_Awesomeness.md b/src/main/md/Math/Probability_Statistics_Awesomeness.md
new file mode 100644
index 00000000..a3bd2309
--- /dev/null
+++ b/src/main/md/Math/Probability_Statistics_Awesomeness.md
@@ -0,0 +1,20 @@
+# Awesome
+
+## Simpson's paradox
+## Randomized response
+## Let's make a deal paradox
+## The Birthday Problem
+
+
+## The Birthday Problem
+* In a hall of 60 people, what is the probability that at least 2 people share a birthday? ≈ 99.4%
+* P + P' = 1, so compute the probability of no shared birthday and subtract it from 1
+* Assume 365 equally likely birthdays
+* Number of pairs among 60 people: (60 * 59)/2 = 1770 pairs of birthdays
+* The 2nd person avoids the 1st person's birthday with probability 364/365 = 0.997260274
+* The 3rd person avoids the first two birthdays with probability 363/365 = 0.994520548
+* ... each additional person must avoid all the birthdays taken so far
+* The 60th person avoids all 59 earlier birthdays with probability 306/365 = 0.838356164
+* Product of all of the above probabilities (no two birthdays match) = (364/365) * (363/365) * ... * (306/365) ≈ 0.0059
+* 1 - the above = 1 - 0.0059 ≈ 0.9941, i.e., about 99.4%
+* The probability that a continuous random variable equals some value is always zero.
\ No newline at end of file
diff --git a/src/main/md/Math/SamplingDesign.md b/src/main/md/Math/SamplingDesign.md
new file mode 100644
index 00000000..81cd4992
--- /dev/null
+++ b/src/main/md/Math/SamplingDesign.md
@@ -0,0 +1,40 @@
+## Design sampling
+
+* Prospective vs Retrospective Study
+* Experiments vs.
Observational Studies +* The most reliable way to determine whether the explanatory variable is actually causing changes in the response variable is to carry out a randomized controlled double-blind experiment. + * If neither the subjects nor the researchers know who was assigned what treatment, then the experiment is called double-blind. + + +## Observational studies: +* The explanatory variable's values are allowed to occur naturally. +* Because of the possibility of lurking variables, it is difficult to establish causation. +* If possible, control for suspected lurking variables by studying groups of similar individuals separately. +* Some lurking variables are difficult to control for; others may not be identified. + +## Experiments + +* The explanatory variable's values are controlled by researchers (treatment is imposed). +* Randomized assignment to treatments automatically controls for all lurking variables. +* Making subjects blind avoids the placebo effect. +* Making researchers blind avoids conscious or subconscious influences on their subjective assessment of responses. +* A randomized controlled double-blind experiment is generally optimal for establishing causation. +* A lack of realism may prevent researchers from generalizing experimental results to real-life situations. +* Noncompliance may undermine an experiment. A volunteer sample might solve (at least partially) this problem. +* It is impossible, impractical or unethical to impose some treatments. + +## Survey checklist +* A sample survey is a type of observational study in which respondents assess variables' values (often by giving an opinion). +* Open questions are less restrictive, but responses are more difficult to summarize. +* Closed questions may be biased by the options provided. +* Closed questions should permit options such as "other:______" and/or "not sure" if those options may apply. +* Questions should be worded neutrally. +* Earlier questions should not deliberately influence responses to later questions. +* Questions shouldn't be confusing or complicated. +* Survey method and questions should be carefully designed to elicit honest responses if there are sensitive issues involved. +* There should be equal number of favoural and un-favourable questions with neutral option + + +For the question, "Have you used illegal drugs in the past year?" +* If respondents answer no, then it is still a possibility that they did use illegal drugs, but didn't want to admit it. +* Respondents are told to flip a fair coin (in private) before answering and then answer based on the result of the coin flip: if the coin flip results in "Heads," they should answer "Yes" (regardless of the truth), if a coin flip results in "Tails," they should answer truthfully. Thus, roughly half of the respondents are "truth-tellers," and the other half give the uncomfortable answer "Yes," without the interviewer's knowledge of who is in which group. The respondent who flips "Tails" and answers truthfully knows that he or she cannot be distinguished from someone who got "Heads" in the coin toss. Hopefully, this is enough to encourage respondents to answer truthfully. As we will learn later in the course, the surveyor can then use probability methods to estimate the proportion of respondents who admit they used illegal drugs in this scenario, while being unable to identify exactly which respondents have been drug abusers. 
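+A minimal sketch of that estimation step, under the design described above (fair coin, "Heads" forces a "Yes", "Tails" answers truthfully); the function name and counts are illustrative assumptions, not from the notes:
+
+```python
+# P(Yes) = 0.5*1 + 0.5*p_true, so p_true = 2*(observed_yes_rate - 0.5).
+def estimate_true_proportion(yes_answers, respondents):
+    observed_yes_rate = yes_answers / respondents
+    return max(0.0, 2 * (observed_yes_rate - 0.5))   # clamp: sampling noise can push it below 0
+
+print(estimate_true_proportion(620, 1000))           # 0.24 -> about 24% estimated
+```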
\ No newline at end of file diff --git a/src/main/md/Math/Statistics.md b/src/main/md/Math/Statistics.md new file mode 100644 index 00000000..7efeba12 --- /dev/null +++ b/src/main/md/Math/Statistics.md @@ -0,0 +1,132 @@ +* Vital Statistics + * Average +* Mathematics Statistics + * Variance + * Predictions +* Discrete - Countable - Color, Gender, Eye Color +* Continious - Measurable - Temperature, Height +* Florance Nightingale + * Nurse and Statistician + * Reported about army mortality and death caused by diseases like typhoid +* Probablity + * Probability approaches and distribution choices + * 5 Approachaes + * Subjective + * Game of chance + * Mathematical + * Relative Frequency + * Bayesian + * 6 main distributions + * Binomial + * Poisson + * Normal + * Chi-Squared + * t Distribution + * F Distribution + * Last 3 are used for statistical significance +* De Moivre's Theorem + * e^i@ (e power i theta) +* Binomial distribution + * Binomial distribution for fair coin with 'n' flips + * If a coin is flipped 10 times, chances of occurence of 10 heads + * (1/2)^10 = 1/1024 => 1 in 1024 is the possibility + * (p + q)^n --> binomial distribution + * When n approaches zero, Binomial becomes normal distribution + * Posson distribution + * The Poisson distribution can be used to calculate the probabilities of various numbers of "successes" based on the mean number of successes. ... The mean of the Poisson distribution is μ. The variance is also equal to μ. + * The fundamental trait of the Poisson is its asymmetry + * Extreme case of Binomial distribution + * Simon Denis Poission - (French mathematician, poisson -> Statistics) +* Normal distribution + * Also known as Gaussian distribution, Bell curve + * If a coin is flipped 10^10 times, chances of occurence of everything as heads, chances of occuring 1000 continious heads + * (p + q)^n --> binomial distribution, when n approaches infinity, binomial becomes normal + * Yard stick to compare other distributions + * Three mathematical property + * Mean (0), Standard-Deviation (1) + * Skewness (zero for normal), direction of tail indicates if it is +ve skewed or -ve skewed +* Central Limit Theorem + * Independent small errors, or law of error + * Pierre-Simon Laplace (french mathematician) + * Random number of very large number of many small and unrelated random effects will be approximately normally distributed +* Mean, Median, Mode + * There are 3 averages for mathematician + * Arithmatic mean, median and mode + * Sometime easier to find median than mean (Ex: height of 100 men) + * Median is middle (once sorted) - (n + 1)/2 spot in the ordered list + * It is the number such that half of the observations fall above, and half fall below. + * Mode - Modal family, most number of occrences. The value that occurs with the highest frequency is the mode. + * It is possible to have more than one mode + * Outliers (one of too high, too low) may skew Mean, but not mode. Mean is very sensitive to outliers. + * Easy to pick one among, to mislead data + * To know the truth, we should know variation around mean values + * Example: Median mortality of eight months means, it is 50th percentail point in distribution, 50% of people live longer than eigth months +* Sampling techniques + * Systematic - every n'th element. + * Incidental - use most accessible and available. 
(Unreliable) + * Purposive - Find representative + * Stratified - Select specific and find non-overlapping groups +* Plotting + * Histogram vs Bar Chart + * Frequency polygon (simplest curve fitting) +* The method of moments + * Average - first method of moments + * The average squared deviation (Variance) - second method of moments. Measures spread + * The average cubed deviation (Skewness) - third method of moments. Measures symmetry + * The average deviation raised to the 4th power (Kurtosis) - fourth method of moments + * Kurtosis - negative - less peaked normal curve + * Kurtosis - positive - high peaked normal curve + * Kurtosis - zero - normal curve + * Quartile and Range are other way to measure simple variances + * Covariance + * If two random variable moves in same direction - +ve variance + * If two random variable moves in opposite direction - -ve variance + * If two random variable not moves to each other - zero variance + * Standard deviation + * Small - most of the data is near mean + * Large - They are way away from mean + * Co-efficient of variation + * Karl_Pearson - created domain of Mathematical statistics + * Std.Deviation as % as of mean. [(Std.dev/Mean) * 100] + * Used to compare "regardless of underlying unit" + * Example: Compare celcius of london temp with "Farenheit of Newyork" + * Nominal + * Not continious, Example: Eye Color, + * Orinal + * if you sorted, what is the index number + * Like enums + * Interval and Ratio + * zero temperature - doesn't show lack of temperature + * 20 degress is not twice as 10 degree + * Correlatoin - using one body part, deriving whole species + * Illusory correlation vs spurious correlation + * Correlation is not percentage + * Correlation is curvilinear relationship (Exmple: Growth of human, small to high, high to small) + * Regression towards the mean (requires two lines)[Exmple: Heights of father, and Heights of son) + * Method of least square (reduces influence of errors) + * Correlation coefficient (r = 08, 0, -0.9) + * Product moment correlation coefficient + * Simple regression two variable (y =ax + b) + * Multi-variate - matrix based, multiple independent variables are involved + * QStatistic (short/tall, poor or rich) Derive binary from continious data + * Point-biserial correlation + * Person's triserial correlation + * Rank order correlation + * Tau coefficient + * Statistical Hypothesis + * Gosset guiness z-test + * When sample size is smaller, normal distribution won't fit + * First one used in quality control + * t-test is replacement for z-test + * Analysis of variance +* IQR - Another measure of spread is the inter-quartile range (IQR), which is the range covered by the middle 50% of the data. +* An observation is considered a suspected outlier if it is: + * Below Q1 - 1.5(IQR) + * Above Q3 + 1.5(IQR) +* Inferential Statistics + * Formal testing and estimation theory + * Based on random variation + +## Sampling +* Sample results change from sample to sample, is called sampling variability. +* The sample mean and sample standard deviation are slightly different from the population mean and standard deviation. 
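+A minimal sketch of the 1.5 × IQR outlier rule listed above, assuming NumPy is available; the data values are illustrative:
+
+```python
+import numpy as np
+
+def iqr_outliers(data):
+    q1, q3 = np.percentile(data, [25, 75])      # first and third quartiles
+    iqr = q3 - q1                               # inter-quartile range
+    return [x for x in data if x < q1 - 1.5 * iqr or x > q3 + 1.5 * iqr]
+
+print(iqr_outliers([2, 3, 4, 5, 5, 6, 7, 8, 40]))   # [40] is the suspected outlier
+```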
\ No newline at end of file diff --git a/src/main/md/Math/StatisticsHigerSecondaryFirst.md b/src/main/md/Math/StatisticsHigerSecondaryFirst.md new file mode 100644 index 00000000..d3261d09 --- /dev/null +++ b/src/main/md/Math/StatisticsHigerSecondaryFirst.md @@ -0,0 +1,44 @@ +* Statistics is concerned with scientific method for collecting, organizing, summarizing, presenting, analyzing and interpreting of data. The word statistics is normally referred either as numerical facts or methods. +* Example usages + * number of accidents in a busy road of a city in a day + * number of people died due to a chronic disease during a month in a state + * Actuarial science is the discipline that extensively applies statistical methods among other subjects involved in insurance and financial institutions. + * Ttests of significance and confidence intervals are heavily used in Medical field +* Statistical subdomain + * Probability Theory, + * Sampling Theory, + * Statistical Inference, + * Design of Experiments, + * Correlation and Regression Methods + * Time Series and Forecasting Techniques. +* Functions of Statistics + * Collection + * Classification + * Condensation + * Comparison + * Correlation + * Causation. + * Chance +* Two major divisions of statistical methods - Descriptive statistics and inferential statistics + + +*Quantitative* - A variable is said to be quantitative if it is measurable and can be expressed in specific units of measurement (numbers). +*Qualitative* - A variable is said to be qualitative if it is not measurable and cannot be expressed in specific units of measurement (numbers). This variable is also called categorical variable. Example: Blood Type, Sex, Race, Religion +*Measurement Scales* - Ordinal, Nominal, Ratio and Interval (ONRI) +* Ordinal - 1/0, Male of Female +* Nominal - (1) Very unhappy (2) Unhappy (3) Okay (4) Happy (5) Very happy. (what is the difference between Okay and Happy?) +* Interval - 60 degree to 70 degree, 70 degree to 80 degree + * it does not mean that an object with temperature 120c is twice as hot as an object with temperature 60c (convert from F to C and compare) + * There may not be absolute zero (shouldn't confused with relative zero) +* Ratio - Height and Weight. + * Used for both descriptive and inferential statistics + * These variables can be meaningfully added, subtracted, multiplied, divided). + * Central tendency can be measured by mean, median, or mode; Measures of dispersion, such as standard deviation and coefficient of variation can also be calculated from ratio scales. + + +* “One death is a tragedy; a million is a statistics.” + + +* Gauss introduced the theory of errors in physical sciences at the end of eighteenth century. 
+ +# \ No newline at end of file diff --git a/src/main/md/Math/img/image_shoe_male1.jpg b/src/main/md/Math/img/image_shoe_male1.jpg new file mode 100644 index 00000000..6deaa382 Binary files /dev/null and b/src/main/md/Math/img/image_shoe_male1.jpg differ diff --git a/src/main/md/Math/img/image_shoe_male2.jpg b/src/main/md/Math/img/image_shoe_male2.jpg new file mode 100644 index 00000000..f5c7a838 Binary files /dev/null and b/src/main/md/Math/img/image_shoe_male2.jpg differ diff --git a/src/main/md/Math/img/statistical_inference.gif b/src/main/md/Math/img/statistical_inference.gif new file mode 100644 index 00000000..3665f846 Binary files /dev/null and b/src/main/md/Math/img/statistical_inference.gif differ diff --git a/src/main/md/Math/statistical_reasoning/Confidence Intervals for the Population Mean Summary.pdf b/src/main/md/Math/statistical_reasoning/Confidence Intervals for the Population Mean Summary.pdf new file mode 100644 index 00000000..5b44b61a Binary files /dev/null and b/src/main/md/Math/statistical_reasoning/Confidence Intervals for the Population Mean Summary.pdf differ diff --git a/src/main/md/Math/statistical_reasoning/Exploratory_Data_Analysis.md b/src/main/md/Math/statistical_reasoning/Exploratory_Data_Analysis.md new file mode 100644 index 00000000..73010f5f --- /dev/null +++ b/src/main/md/Math/statistical_reasoning/Exploratory_Data_Analysis.md @@ -0,0 +1,61 @@ +## Eploratory Data Analysis + +This summary provides a quick recap of the material you've learned in the Exploratory Data Analysis unit. Please note that this summary does not provide complete coverage of the material, but just lists the main points. We therefore recommend that you use this summary only as a checklist or a review before going on to the next unit, or before an exam. + +* The purpose of exploratory data analysis (EDA) is to convert the available data from their raw form to an informative one, in which the main features of the data are illuminated. + +* When performing EDA, we should always: + * use visual displays (graphs or tables) plus numerical summaries. + * describe the overall pattern and mention any striking deviations from that pattern. + * interpret the results we got in context. + +* When examining the distribution of a single variable, we distinguish between a categorical variable and a quantitative variable. + +* The distribution of a categorical variable is summarized using: + + * Display: pie-chart or bar-chart (variation: pictogram → can be misleading—beware!) + * Numerical summaries: category (group) percentages. + +* The distribution of a quantitative variable is summarized using: + + * Display: histogram (or stemplot, mainly for small data sets). When describing the distribution as displayed by the histogram, we should describe the: + * Overall pattern → shape, center, spread. + * Deviations from the pattern → outliers. + * Numerical summaries: descriptive statistics (measure of center plus measure of spread): + * If distribution is symmetric with no outliers, use mean and standard deviation. + * Otherwise, use the five-number summary, in particular, median and IQR (inter-quartile range). +* The five-number summary and the 1.5(IQR) Criterion for detecting outliers are the ingredients we need to build the boxplot. Boxplots are most effective when used side-by-side for comparing distributions (see also case C→Q in examining relationships). + +* In the special case of a distribution having the normal shape, the Standard Deviation Rule applies. 
This rule tells us approximately what percent of the observations fall within 1,2, or 3 standard deviations away from the mean. In particular, when a distribution is approximately normal, almost all the observations (99.7%) fall within 3 standard deviations of the mean. + +* When examining the relationship between two variables, the first step is to classify the two relevant variables according to their role and type: and only then to determine the appropriate tools for summarizing the data. (We don't deal with case Q→C in this course). + * There are explanatory categorical variables, explanatory quantitative variables, response categorical variables, and response quantitative variables. It is possible for any type of explanatory variable to be paired with any type of response variable. The possible pairings are: Categorical Explanatory → Categorical Response (C→C), Categorical Explanatory → Quantitative Response (C→Q), Quantitative Explanatory → Categorical Response (Q→C), and Quantitative Explanatory → Quantitative Response (Q→Q) +* Case C→Q: + Exploring the relationship amounts to comparing the distributions of the quantitative response variable for each category of the explanatory variable. To do this, we use: + + * Display: side-by-side boxplots. + * Numerical summaries: descriptive statistics of the response variable, for each value (category) of the explanatory variable separately. + +* Case C→C: + Exploring the relationship amounts to comparing the distributions of the categorical response variable, for each category of the explanatory variable. To do this, we use: + + * Display: two-way table. + * Numerical summaries: conditional percentages (of the response variable for each value (category) of the explanatory variable separately). +* Case Q→Q: + We examine the relationship using: + + * Display: scatterplot. When describing the relationship as displayed by the scatterplot, be sure to consider: + * Overall pattern → direction, form, strength. + * Deviations from the pattern → outliers. +Labeling the scatterplot (including a relevant third categorical variable in our analysis), might add some insight into the nature of the relationship. + +In the special case that the scatterplot displays a linear relationship (and only then), we supplement the scatterplot with: + +* Numerical summaries: the correlation coefficient (r) measures the direction and, more importantly, the strength of the linear relationship. The closer r is to 1 (or -1), the stronger the positive (or negative) linear relationship. r is unitless, influenced by outliers, and should be used only as a supplement to the scatterplot. + * When the relationship is linear (as displayed by the scatterplot, and supported by the correlation r), we can summarize the linear pattern using the least squares regression line. Remember that: + * The slope of the regression line tells us the average change in the response variable that results from a 1-unit increase in the explanatory variable. + * When using the regression line for predictions, you should beware of extrapolation. + +* When examining the relationship between two variables (regardless of the case), any observed relationship (association) does not imply causation, due to the possible presence of lurking variables. + +* When we include a lurking variable in our analysis, we might need to rethink the direction of the relationship → Simpson's paradox. 
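+A minimal sketch of the Q→Q case summarized above (the correlation coefficient r plus the least squares regression line), assuming NumPy is available; the data points are illustrative:
+
+```python
+import numpy as np
+
+x = np.array([1.0, 2.0, 3.0, 4.0, 5.0])    # explanatory variable
+y = np.array([2.1, 3.9, 6.2, 8.1, 9.8])    # response variable
+
+r = np.corrcoef(x, y)[0, 1]                # direction and strength of the linear relationship
+slope, intercept = np.polyfit(x, y, 1)     # least squares line y = slope*x + intercept
+
+print(round(r, 3))                         # close to 1 -> strong positive linear relationship
+print(round(slope, 3))                     # average change in y per 1-unit increase in x
+```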
\ No newline at end of file
diff --git a/src/main/md/Math/statistical_reasoning/HT_population_mean.md b/src/main/md/Math/statistical_reasoning/HT_population_mean.md
new file mode 100644
index 00000000..8f120084
--- /dev/null
+++ b/src/main/md/Math/statistical_reasoning/HT_population_mean.md
@@ -0,0 +1,35 @@
+
+To Summarize
+1. In hypothesis testing for the population mean (μ), we distinguish between two cases:
+   I. The less common case when the population standard deviation (σ) is known.
+   II. The more practical case when the population standard deviation is unknown and the sample standard deviation (s) is used instead.
+2. In the case when σ is known, the test for μ is called the z-test, and in the case when σ is unknown and s is used instead, the test is called the t-test.
+3. In both cases, the null hypothesis is:
+   H0: μ = μ0
+   and the alternative, depending on the context, is one of the following:
+   Ha: μ < μ0, or Ha: μ > μ0, or Ha: μ ≠ μ0
+4. Both tests can be safely used as long as the following two conditions are met:
+   (i) The sample is random (or can at least be considered random in context).
+   (ii) Either the sample size is large (n > 30) or, if not, the variable of interest can be assumed to vary normally in the population.
+
+5. In the z-test, the test statistic is:
+   z = (x̄ − μ0) / (σ/√n)
+   whose null distribution is the standard normal distribution (under which the p-values are calculated).
+
+6. In the t-test, the test statistic is:
+   t = (x̄ − μ0) / (s/√n)
+   whose null distribution is t(n - 1) (under which the p-values are calculated).
+
+7. For large sample sizes, the z-test is a good approximation for the t-test.
+
+8. Confidence intervals can be used to carry out the two-sided test H0: μ = μ0 vs. Ha: μ ≠ μ0, and in cases where H0 is rejected, the confidence interval can give insight into the value of the population mean (μ).
+9. Here is a summary of which test to use under which conditions:
\ No newline at end of file
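+A minimal sketch (illustrative numbers, not from the notes) of the t statistic in point 6 above, computed by hand; a library routine such as scipy.stats.ttest_1samp would also return the p-value under t(n - 1):
+
+```python
+# One-sample t statistic for H0: mu = mu0 (two-sided alternative).
+import math
+
+def one_sample_t(sample, mu0):
+    n = len(sample)
+    xbar = sum(sample) / n
+    s = math.sqrt(sum((x - xbar) ** 2 for x in sample) / (n - 1))  # sample SD
+    return (xbar - mu0) / (s / math.sqrt(n))                       # compare to t(n-1)
+
+print(one_sample_t([9.8, 10.4, 10.1, 9.6, 10.7, 10.2], mu0=10))    # ≈ 0.82
+```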
diff --git a/src/main/md/Math/statistical_reasoning/Probability.md b/src/main/md/Math/statistical_reasoning/Probability.md
new file mode 100644
index 00000000..0ff12be7
--- /dev/null
+++ b/src/main/md/Math/statistical_reasoning/Probability.md
@@ -0,0 +1,69 @@
+
+## General Remarks about probability
+* Probability is a discipline by itself. In the context of the big picture of this course, probability is used to quantify the imperfection associated with drawing conclusions about the entire population based only on a random sample drawn from it.
+
+* We talk about the probability of an event, which is a statement about the outcome of a random experiment. In practice, each event corresponds to a subset of outcomes from the sample space.
+
+* The probability of an event can be as low as 0 (when the event is impossible) and as high as 1 (when the event is certain).
+
+* In some cases the only way to find the probability of an event of interest is by repeating the random experiment many times and using the relative frequency approach.
+
+* When all the possible outcomes of a random experiment are equally likely, the probability of an event is the fraction of outcomes which satisfy it.
+
+## Random Variables
+
+* A random variable is a variable whose values are numerical results of a random experiment.
+
+* A discrete random variable is summarized by its probability distribution -- a list of its possible values and their corresponding probabilities.
+
+  * The probabilities of all possible values must sum to 1.
+
+  * The probability distribution can be represented by a table, histogram, or a formula.
+
+* The probability distribution of a random variable can be supplemented by numerical measures of center and spread of the random variable.
+
+  * Center: The center of a random variable is measured by its mean.
+
+    * The mean of a random variable can be interpreted as its long run average.
+
+    * The mean is a weighted average of the possible values of the random variable, weighted by their corresponding probabilities.
+
+  * Spread: The spread of a random variable is measured by its variance, or more typically by its standard deviation (the square root of the variance).
+
+    * The standard deviation of a random variable can be interpreted as the typical (or long run average) distance between the value that the random variable assumes and the mean of X.
+
+## Continuous Random Variables
+The probability distribution of a continuous random variable is represented by a probability density curve. The probability that the random variable takes a value in any interval of interest is the area above this interval and below the density curve.
+
+An important example of a continuous random variable is the normal random variable, whose probability density curve is symmetric (bell-shaped), bulging in the middle and tapering at the ends.
+
+* There are "many" normal random variables, each determined by its mean μ (which determines where the density curve is centered) and standard deviation σ (which determines how spread out (wide) the normal density curve is).
+
+* Any normal random variable follows the Standard Deviation Rule, which can help us find probabilities associated with the normal random variable.
+
+* Another way to find probabilities associated with the normal random variable is using the standard normal table. This process involves finding the z-score of values, which tells us how many standard deviations below or above the mean the value is.
+
+* An important application of the normal random variable is that it can be used as an approximation to the binomial random variable (under certain conditions). A continuity correction can improve this approximation.
+
+## Sampling Distributions
+A *parameter* is a number that describes the population, and a statistic is a number that describes the sample.
+
+  * Parameters are fixed, and in practice, usually unknown.
+
+  * Statistics change from sample to sample due to sampling variability.
+
+  * The behavior of the possible values the statistic can take in repeated samples is called the sampling distribution of that statistic.
+
+## The sampling distribution of the sample proportion, p̂ (under certain conditions):
+
+  * is centered around p, the proportion in the entire population from which the sample is drawn.
+  * has a standard deviation of √(p(1−p)/n)
+  * is approximately normal (under certain conditions).
+
+According to the Central Limit Theorem, the sampling distribution of the sample mean, x̄ (x-bar):
+* is centered around μ, the mean of the entire population from which the sample is drawn.
+* has a standard deviation of σ/√n
+* for large enough sample size n, is approximately normal (regardless of the shape of the population distribution). See the simulation sketch below.
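+A minimal simulation sketch of the Central Limit Theorem bullets above, assuming NumPy is available; the skewed gamma population and all numbers are illustrative assumptions, not from the course material:
+
+```python
+# Sample means from a skewed population are centered at mu, have spread
+# close to sigma/sqrt(n), and look roughly normal for large n.
+import numpy as np
+
+rng = np.random.default_rng(0)
+mu, sigma, n, repeats = 10.0, 4.0, 50, 20_000
+
+# Gamma population chosen so its mean is mu and its SD is sigma.
+draws = rng.gamma(shape=mu**2 / sigma**2, scale=sigma**2 / mu, size=(repeats, n))
+sample_means = draws.mean(axis=1)
+
+print(sample_means.mean())   # close to mu
+print(sample_means.std())    # close to sigma / sqrt(n) ≈ 0.57
+```
+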
+ +## Reference +* [CMU statistical reasoning summary] (https://oli.cmu.edu/jcourse/workbook/activity/page?context=8ff078e10a0001dc511d84c945d35bdd) \ No newline at end of file diff --git a/src/main/md/Math/statistical_reasoning/SamplingDistribution.md b/src/main/md/Math/statistical_reasoning/SamplingDistribution.md new file mode 100644 index 00000000..a7847f9b --- /dev/null +++ b/src/main/md/Math/statistical_reasoning/SamplingDistribution.md @@ -0,0 +1,4 @@ + * Sample results change from sample to sample, is called sampling variability. + * parameter and statistic - (definition) A parameter is a number that describes the population; a statistic is a number that is computed from the sample. + * sample size plays a role in the spread of the distribution of sample proportion: there should be less spread for larger samples, more spread for smaller samples. + * \ No newline at end of file diff --git a/src/main/md/Math/statistical_reasoning/StatisticalInference.md b/src/main/md/Math/statistical_reasoning/StatisticalInference.md new file mode 100644 index 00000000..97a42c04 --- /dev/null +++ b/src/main/md/Math/statistical_reasoning/StatisticalInference.md @@ -0,0 +1,10 @@ +* Value of population proportion, based on an observed sample proportion. This process—inferring something about the population based on what is measured in the sample—is (as you know) called statistical inference. + +* "We are 95% confident that the sample mean x(par) falls within 3 units of μ population-mean" (Language of probability) +* "We are 95% confident that the population mean μ falls within 3 units of aritmetic-mean of statistics x(bar)" (Language of statistics) + +* There is a trade-off between the level of confidence and the precision with which the parameter is estimated. +* Higher the confidence tends to lower precision +* The same level of confidence (95%) with narrower precision is possible by increasing sample-size. +* On an intuitive level, if our estimate x^ is based on a larger sample (i.e., a larger fraction of the population), we have more faith in it, or it is more reliable, and therefore we need to account for less error around it. +* \ No newline at end of file diff --git a/src/main/md/Math/statistical_reasoning/StatisticsUniversity_Linkds.md b/src/main/md/Math/statistical_reasoning/StatisticsUniversity_Linkds.md new file mode 100644 index 00000000..a6beaa7b --- /dev/null +++ b/src/main/md/Math/statistical_reasoning/StatisticsUniversity_Linkds.md @@ -0,0 +1,7 @@ +* [Experimental Design and Analysis](http://www.stat.cmu.edu/~hseltman/309/Book/Book.pdf) +* [Old but gold](http://www.stat.yale.edu/Courses/1997-98/101/stat101.htm) +* [Harvard stats math and derivation of formula](https://projects.iq.harvard.edu/stat110/home) +* [Introduction to Probability -Provided by Harvard University (HarvardX)](https://www.edx.org/course/introduction-to-probability-0) + * The lecturer draws a lot of real life example when talking about probability. And he also derives by hand for things like mean and variance for different distributions. +* [STAT 415 Intro Mathematical Statistics](https://newonlinecourses.science.psu.edu/stat414/node/50/) +* We are 95% confident that by using 260 days as the estimate for μ, our estimation error is no more than 3 days. 
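+A rough worked version of the "3 days" statement above, assuming the pregnancy-length example used elsewhere in these notes (σ = 16 days) and the approximate "2 standard errors ≈ 95%" rule; the sample size is back-solved here, not taken from the course:
+
+```python
+# margin = 2 * sigma / sqrt(n)  ->  n = (2 * sigma / margin) ** 2
+import math
+
+sigma, margin = 16, 3            # assumed population SD (days) and target error (days)
+n_required = (2 * sigma / margin) ** 2
+print(math.ceil(n_required))     # about 114 observations needed
+```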
\ No newline at end of file diff --git a/src/main/md/Math/statistical_reasoning_cmu_oli.md b/src/main/md/Math/statistical_reasoning_cmu_oli.md new file mode 100644 index 00000000..a62cd4c3 --- /dev/null +++ b/src/main/md/Math/statistical_reasoning_cmu_oli.md @@ -0,0 +1,46 @@ +* Stem plot +* Quantitative variable is described by its shape, center, and spread. +* IQR = Q3 - Q1 +* An observation is considered a suspected outlier if it is: + * Below Q1 - 1.5(IQR) + * Above Q3 + 1.5(IQR) +* The combination of all five numbers (min, Q1, M, Q3, Max) is called the five number summary, +* Standard deviation + * One with biasness + * Bessel's Correction SD is without biasness +* The Standard Deviation Rule + * Also known as empirical rule + * 68, 95, 99.7 - Area under different standard deviation + * If SD is zero, data in the problem in unifrom, no deviation + * The standard deviation measures the spread by reporting a typical (average) distance between the data points and their average. + * Mean, SD should be used for symmetric data. + +* When creating a scatterplot, the explanatory variable should always be plotted on the horizontal X-axis +* Scaterplot patterns + * +Positive + * -Negatives + * V-Shape (Neither +ve, nor -Ve) or (Half +ve and Half -ve) + * The average fuel usage of driving a fixed distance in a car, and the speed at which the car drives: + * Curve linear + * Cluster + * Strong or Weak relationship + * Can find "diminishing returns" + +* Linear relationship + * Strength of a linear relationship - Correlation coefficient and is denoted by r. + * R ranges from -1 to +1 + * Negative relationship -1, Positive relationship +1, No relationship 0 + * +Ve would produce Scaterplot patterns positive + + +* Four kinds of co-relation + * C-Q + * C-C + * Q-C + * Q-C + +* Co-relation + * We use the terms explanatory and response variables instead of independent and dependent variables. +# Reference +* [Paper describes different distributions](http://people.stern.nyu.edu/adamodar/pdfiles/papers/probabilistic.pdf) +* [Geogebra](https://www.geogebra.org/) \ No newline at end of file diff --git a/src/main/md/Mind/All.md b/src/main/md/Mind/All.md new file mode 100644 index 00000000..109d3866 --- /dev/null +++ b/src/main/md/Mind/All.md @@ -0,0 +1,8 @@ +* [Swami Sarvapriyananda-"BHAGVAD GITA FOR STUDENTS" at IIT Kanpur](https://www.youtube.com/watch?v=_EqwlOeTj7Y) +* [Swami Sarvapriyananda-"PURPOSEFUL LIVING WITH VEDANTA" at IIT Kanpur](https://www.youtube.com/watch?v=U2z1zdOsVEg) +* [Swami Sarvapriyanandaji, Ramana Jayanti, Arunachal Ashram, Jamaica, NY](https://www.youtube.com/watch?v=X2rfZ1ytWD8) +* [Practical Methods of Meditation | Swami Sarvapriyananda](https://www.youtube.com/watch?v=h8m8ac_aBao) +* [Swami Sarvapriyananda at IITK - "Who Am I?" according to Mandukya Upanishad-Part 1](https://www.youtube.com/watch?v=eGKFTUuJppU) +* [Swami Sarvapriyananda-"Secret of Concentration" at IIT Kanpur](https://www.youtube.com/watch?v=BGswR0tMqCM) +* [Swamini Vimalananda - Mastering the Mind](https://www.youtube.com/watch?v=EXniWH9dMf8&t=6s) +* [Swamini Vimalananda speaks on Mind Management at IIT Kanpur on 26 Sept. 
2014](https://www.youtube.com/watch?v=PhwtixXNbjs) \ No newline at end of file diff --git a/src/main/md/Mind/Concentration.md b/src/main/md/Mind/Concentration.md index 1d80c9c4..7a413b35 100644 --- a/src/main/md/Mind/Concentration.md +++ b/src/main/md/Mind/Concentration.md @@ -1,100 +1,115 @@ -## [Swami Sarvapriyananda-"Secret of Concentration" at IIT Kanpur](https://www.youtube.com/watch?v=BGswR0tMqCM) - -## Conentration -* Whatever you need to acheive, you require conentration, conentration gives **richer experience**. -* Quality of life - * Life depends on quality of experience - * Experience depends on quality of conentration -* We all have limited cognitive capacity -* We are 128 bit processing person -* when we use all of that 128 bit, will ignore everything else like hunger, weather, senses. Becuase there is no bandwidth -* 128 bit is based on book Flow, fact may vary but context remains same. -* Choose what to conentrate on, how much you want to concentrate-on. -* Milton - “The mind is its own place, and in itself can make a heaven of hell, a hell of heaven..” - * By choosing what to concentrate - * By choosing how much to concentrate - -## Flow requires followings - * Requires high level of skill - * Requires difficult activity - * Action and awareness merging (Absorbed into the scientific work and forget about himself) - * Clear goal should be there - * For research, we may not have goal, but aware what goal we try to achieve - * Feedback is required - -## Natural concentration -* When you watch movie and cricket - * We don't feel discomfort - * Won't feel hungry - * When you are deeply obsorbed you won't notice other senses feedback - -## Vivekananda - * Vivekananda was able to read entire volume of "Encyclopedia Britanica" and able to answer the questions - * He also mentioned that with meditation (and concentration) and purity, anybody can do it. - * He also symbolised Snake for concentration - * Learn the power of detachment - * Detachment should be practicsed, detach from all the other issues. - * Education is all about power of concentration. -* Nepolean opens the drawyer of issue and close all the other drawyer, don't worry and confuse with other drawyer - - -## Challenges vs skills - Flow model -* Arousel (ready to face challenge) (Highest challenge, moderate skill) -* Anxiety (Highest challenge, lower than moderate skill) -* Worry (Moderate challenge, lower skill) -* Apathy (Lower challenge, lower skill) -* --- -* Boredom (Lower challenge, moderate skill) -* Relaxation (Lower challenge, highest skill) -* Control (moderate challenge, highest skill) -* Flow (Highest skill and highest challenge) - -## How does it feel to be in a Flow? - * Completely involved in what you are doing- focused, concentrated. Even lossing other senses (hot, hungry, time) - * A sense of ecstasy - of being outside everyday reality - * Great inner clarity knowing what needs to be done, and how well we are doing (feedback) - * A sense of serenity - no worries about oneself, and a feeling of growing beyond the boundaries of the ego - * Intrinsic motivation - whatever produces flow becomes its own reward - * Timelessness - thoroughly focused on the present, hours seem to pass by in minutes or minutes looks like years. - - -## Vivekananda About Goal -* Goal should have an end. 
Find what is the end (To complete IIT, find highest paying job in the world) -* Forget the end and focus on the means -* Keep on thinking about final goal won't help -* Clear goals are necessary for flow, without clear goals we will end up in Anxiety - -## Flow and Yoga -* Start with right posture, sit straight. Don't lean forward like sleeping or releaxed -* Do pranayama - concentration will make breathing in both nostril, Balanced breath should be there to focus. - * We can try with "Nadi suddhi pranayama" (In the ration of - Breath in 1 vs breath out 2) -* Arouse curiosity, setup a problem and read material so you will be curious and concentrate more. -* Avoid polluted, noisy area and try to withdraw from rest of the world. -* Focus (Dharana) - * Be active reader, mark and do underline, place your question, write what you agree, disagree -* Stay on unbroken focus (Dhyana) - -## Yoga -* Yama - Moral life -* Niyama - Discipline life -* Asana - Posture -* Pranayama - Deep breath regularly -* Prathyara - Withdrawing from world. Means literally “control of ahara,” or “gaining mastery over external influences.”. Intelligent but don't struggle. -* Dharana - Focus, Concentration -* Dyana - Meditation, Unbroken focus -* Samadhi - -## Flow : is a function that involves Skill, Challenge and produces new state -* Flow : (Skill, Challenge) => State -* Concentraion: directly proportionate to mental purity (uncomplicated life is also mental purity) -* Kama (pleasure (ex:partying, movie, etc), Artha (money, success, awards, promotion), we need them instinctively. But they should be regulated by Dharana (Morality) -* Pamara = [(Kama + Artha) - Moral] ~~> Produces unpleasant life -* Visayi = [(Kama + Artha) + Moral] ~~> Leads life within bounds, and laws -* Gandhi, Anna Hasare, Social Workers = [(Moksha + Dharma) - Karma - Artha ] ~~> Gives up personal gain, works for wellfare of thers. - -## Reference -"Yoga is one of the oldest and most systematic methods of producing the flow experience" -[Ted Talk by Martin Seligman](http://www.ted.com/talks/martin_seligman_on_the_state_of_psychology?language=en#t-9177) -[Ted Talk by Mihaly Csikszentmihalyi](http://www.ted.com/talks/mihaly_csikszentmihalyi_on_flow?language=en) -[Sharatchandra Chakravarti experience of Vivekananda](http://www.swamivivekanandaquotes.org/2014/05/swami-vivekanandas-reading-speed-and-memory-power.html) -[Attaining flow](https://images.google.com/?q=attaining+flow) +## [Swami Sarvapriyananda-"Secret of Concentration" at IIT Kanpur](https://www.youtube.com/watch?v=BGswR0tMqCM) + +## Conentration +* Whatever you need to achieve, you require concentration, concentration gives **richer experience**. +* The degree between ordinary and extra-ordinary person lies in degree of concentration +* Quality of life + * Life depends on quality of experience + * Experience depends on quality of concentration +* We all have limited cognitive capacity +* We are 128 bit processing person +* when we use all of that 128 bit, will ignore everything else like hunger, weather, senses. Because there is no bandwidth +* 128 bit is based on book Flow, fact may vary but context remains same. +* Choose what to concentrate on, how much you want to concentrate-on. +* Milton - "The mind is its own place, and in itself can make a heaven of hell, a hell of heaven.." 
+ * By choosing what to concentrate + * By choosing how much to concentrate + +## Flow requires followings + * Requires high level of skill + * Requires difficult activity + * Action and awareness merging (Absorbed into the scientific work and forget about himself) + * Clear goal should be there + * For research, we may not have goal, but aware what goal we try to achieve + * Feedback is required + +## Natural concentration +* When you watch movie and cricket + * We don't feel discomfort + * Won't feel hungry + * When you are deeply Absorbed you won't notice other senses feedback + +## Vivekananda + * Vivekananda was able to read entire volume of "Encyclopedia Britanica" and able to answer the questions + * He also mentioned that with meditation (and concentration) and purity, anybody can do it. + * He also symbolised Snake for concentration + * Learn the power of detachment + * Detachment should be practiced, detach from all the other issues. + * Education is all about power of concentration. +* Napoleon opens the drawyer of issue and close all the other drawyer, don't worry and confuse with other drawyer + +## How to improve + * Cultivate the habit of paying fullest attention to what you are doing + * Train the mind to concentrate, by keep concentrating + * If we keep our mind wander, it wound wander always, it won't be good concentration + * Read two books Flow and RAPT + +## Challenges vs skills - Flow model +* Arousel (ready to face challenge) (Highest challenge, moderate skill) +* Anxiety (Highest challenge, lower than moderate skill) +* Worry (Moderate challenge, lower skill) +* Apathy (Lower challenge, lower skill) +* --- +* Boredom (Lower challenge, moderate skill) +* Relaxation (Lower challenge, highest skill) +* Control (moderate challenge, highest skill) +* Flow (Highest skill and highest challenge) + +## How does it feel to be in a Flow? + * Completely involved in what you are doing- focused, concentrated. Even lossing other senses (hot, hungry, time) + * A sense of ecstasy - of being outside everyday reality + * Great inner clarity knowing what needs to be done, and how well we are doing (feedback) + * A sense of serenity - no worries about oneself, and a feeling of growing beyond the boundaries of the ego + * Intrinsic motivation - whatever produces flow becomes its own reward + * Timelessness - thoroughly focused on the present, hours seem to pass by in minutes or minutes looks like years. + + +## Vivekananda About Goal +* Goal should have an end. Find what is the end (To complete IIT, find highest paying job in the world) +* Forget the end and focus on the means +* Keep on thinking about final goal won't help +* Clear goals are necessary for flow, without clear goals we will end up in Anxiety + +## Flow and Yoga +* Start with right posture, sit straight. Don't lean forward like sleeping or relaxed +* Do pranayama - concentration will make breathing in both nostril, Balanced breath should be there to focus. + * We can try with "Nadi suddhi pranayama" (In the ration of - Breath in 1 vs breath out 2) +* Arouse curiosity, setup a problem and read material so you will be curious and concentrate more. +* Avoid polluted, noisy area and try to withdraw from rest of the world. +* Focus (Dharana) + * Be active reader, mark and do underline, place your question, write what you agree, disagree +* Stay on unbroken focus (Dhyana) + +## Yoga +* Yama - Moral life +* Niyama - Discipline life +* Asana - Posture +* Pranayama - Deep breath regularly +* Prathyara - Withdrawing from world. 
Means literally “control of ahara,” or “gaining mastery over external influences.” Intelligent but don't struggle. +* Dharana - Focus, Concentration +* Dhyana - Meditation, Unbroken focus +* Samadhi + + +## Quotes +* Concentration comes out of a combination of confidence and hunger +* Focusing is about saying "NO" +* Deciding what not to do is as important as deciding what to do +* Avoid everything in general and focus on one thing in particular. +* Your goal is not to battle with the mind (distractions), but to witness the mind. + + +## Flow : is a function that involves Skill, Challenge and produces new state +* Flow : (Skill, Challenge) => State +* Concentration: directly proportional to mental purity (uncomplicated life is also mental purity) +* Kama (pleasure, e.g. partying, movies, etc.) and Artha (money, success, awards, promotion) - we need them instinctively. But they should be regulated by Dharma (Morality) +* Pamara = [(Kama + Artha) - Moral] ~~> Produces unpleasant life +* Visayi = [(Kama + Artha) + Moral] ~~> Leads life within bounds and laws +* Gandhi, Anna Hazare, Social Workers = [(Moksha + Dharma) - Kama - Artha ] ~~> Gives up personal gain, works for welfare of others. + +## Reference +* "Yoga is one of the oldest and most systematic methods of producing the flow experience" +* [Ted Talk by Martin Seligman](http://www.ted.com/talks/martin_seligman_on_the_state_of_psychology?language=en#t-9177) +* [Ted Talk by Mihaly Csikszentmihalyi](http://www.ted.com/talks/mihaly_csikszentmihalyi_on_flow?language=en) +* [Sharatchandra Chakravarti experience of Vivekananda](http://www.swamivivekanandaquotes.org/2014/05/swami-vivekanandas-reading-speed-and-memory-power.html) +* [Attaining flow](https://images.google.com/?q=attaining+flow) diff --git a/src/main/md/Mind/IWouldPractice_Programming_Mind.md b/src/main/md/Mind/IWouldPractice_Programming_Mind.md new file mode 100644 index 00000000..404a5e6b --- /dev/null +++ b/src/main/md/Mind/IWouldPractice_Programming_Mind.md @@ -0,0 +1,28 @@ +# Practice +* [The advertisement itself has a wonderful message](http://mindvalley.com) +* [SpeedReading](../tools/speed_read.md) +* The Benefits of Talking to Yourself + * Talk loudly to yourself + * Talk as if you are a third person, and don't use "I can do", rather "Jane! You can do this" + * Feedback hypothesis - The idea is, if you hear a word, that helps you see something + * My bet is that self-talk works best on problems where you are trying to stay on task and there are possible distractions + * For tasks with a multistep sequence, talking to yourself out loud can help you keep out distractions and remind yourself where you are + * Don't break the chain - https://jamesclear.com/stop-procrastinating-seinfeld-strategy + +* Practicing Affirmation + * Practice using prayer beads + * Each bead, chant an affirmation + * Repeat the same chant over and over again + * To find the secrets of the universe think in terms of "Energy, Frequency and Vibration" + * Good Affirmation + * Concise choice of positive words (Example: "I love red gala apples" rather than "I love apples") + * Clear visualization + * Corresponding Feeling +* Mind wandering + * Mind wandering is the opposite of concentration - You are not in control. + * A concentrated mind is - you are in control. 
+ +# References +* [Program Your Mind While You Sleep](https://www.youtube.com/watch?v=X_MDb0HzFE0) +* [Practicing Affirmation](https://www.youtube.com/watch?v=Gku2OodrnQ0) +* [The Benefits of Talking to Yourself](https://mobile.nytimes.com/2017/06/08/smarter-living/benefits-of-talking-to-yourself-self-talk.html) \ No newline at end of file diff --git a/src/main/md/Mind/MasterMind.md b/src/main/md/Mind/MasterMind.md new file mode 100644 index 00000000..1ace9d71 --- /dev/null +++ b/src/main/md/Mind/MasterMind.md @@ -0,0 +1,46 @@ +# [Dandapani - Master Your Mind - PART 1/2 | London Real](https://www.youtube.com/watch?v=FeGOZ9gUe5k) +* What comes before meditation? Concentration! Concentration comes before meditation? +* So you need to learn how to concentrate in order to meditate, +* We believe that you can't meditate if you can't concentrate +* Meditation can't change your life style! You create a lifestyle first! meditation second! Then meditation affects a lifestyle not the other way around +* if you want [to] get to know yourself you need to spend time [with] yourself + * Within you, you ask yourself what you like? what you don't like? + * Do you like sport xyz? do you like psychology? computer? + * Spend time with yourself as if you are third party + +# [Swami Niranjanananda on "How You control the Mind ?"](https://www.youtube.com/watch?v=Ll5Z8m8YOoM&t=14s) +* You can't train a horse within a day, it requires months/year to train, and daily practice +* Horses are wild, run when it runs, rest when it runs +* Just like kid followed the horses to become its friend, you could become friend of your mind. +* We have four horses, Manas, Putthi, Chittha and Ahankara. We can't become instant master +* We should work harder daily to master, not within hours could attain +* We should become friend to ourselves +* Pratiyahara - secret is controlling mind + * Lesson of Pratiyahara, become friend of your mind + * If we are not friend to our mind, we would fight with that +* Those who meditate more than 20 minutes (without doing their work) are lazy as per "Niranjanananda" joke, but he claims as joke +* We should become friend of "Manas, Putthi, Chittha and Ahankara" +* Self friendship, inner process of Yoga begins +* Knowing unknown dimensions of our personalities will help + +# [The Science Of Yogic Breathing | Sundar Balasubramanian | TEDxCharleston](https://www.youtube.com/watch?v=aIfwbEvXtwo) + * Controlling mind is not easy mind is very elusive as the eastern philosophy puts it mind is a monkey it's not a normal monkey it's a drunken munkey stung by scorpion. + * Thirumoolar says there is an easy way, mind cannot travel on its own mind needs a vehicle + * Mind is using a horse that horse is our breathing. + * So if you want to control the mind the rider, you have to control the vehicle the horse the breathing + * Controlling the mind is possible by controlling the breathing + * Therefore whether you sing chant or hum or practice sophisticated yogic breathing techniques mind your breathing all ways + * It will improve your health. 
+ + +# How to control Negative thought & Excessive Sexual Desire by Swami Niranjananda Sarswati +* Cultivate opposite thought + * Connect with positive thought + * When your mind says "I don't like xyz", cultivate "I like xyz because of behaviour 123" +* We have passion and lust + * We can cultivate into opposite, +ve one and convert into seeking passion + * Ramakrishna thought her wife as "Kali-devi", Treated her Cosmic mother +* How they teach in Ashram + * Through asana you can control urge + * Through meditation, you can change the attitude +* One who is connected with positive for uplift is bramhacharia (Celibacy) \ No newline at end of file diff --git a/src/main/md/Mind/Meditation.md b/src/main/md/Mind/Meditation.md index a2998f2e..030ce7e8 100644 --- a/src/main/md/Mind/Meditation.md +++ b/src/main/md/Mind/Meditation.md @@ -1,6 +1,6 @@ ## Why to meditate -* During meditation thoughts will wander, and aware our original mind behaviour +* During meditation thoughts will wander, and aware our original mind behavior * We will observe the pattern in which wander, and how it creates emotion * It will remind some of the most important tasks, plan of us * It improves will power @@ -10,3 +10,22 @@ * Holy meditation helps to burn out all mental impurities. * This perfect rest we will get in meditation. Not even the deepest sleep will give you such a rest as meditation can. * Not even the deepest sleep will give you such a rest as meditation can. The mind goes on jumping even in deepest sleep. Just those few moments in meditation your brain has almost stopped. You forget the body. You feel such pleasure in it. You become so light. This perfect rest we will get in meditation. + + +# [Swami Sarvapriyananda on Introduction to Meditation](https://www.youtube.com/watch?v=A6J9qEYeZbs) + +* Meditate lot when mood is good, if mood is bad, still meditate as long as possible +* Meditate at the same time +* Meditate at the same place, use same mat, use same chair, and only use that for meditation +* Don't fight with negative thoughts, and don't control them, it might becomes violent +* Be careful about company (friends), and what you consume (like TV) + * One who talks about money would inject thoughts about money + * One who talks about politics would inject thoughts about power +* Bring simplicity to life, hardship and serenity to life +* Before meditation, forget everything possible, Detachment is required. + * Job, relationship, external world + * Thoughts +* Great desire is required for realization + * Offer everything to god +* Holy company is very good + * Listen about god from who realized, and practiced many years \ No newline at end of file diff --git a/src/main/md/Mind/Memory_Remember.md b/src/main/md/Mind/Memory_Remember.md new file mode 100644 index 00000000..17179e43 --- /dev/null +++ b/src/main/md/Mind/Memory_Remember.md @@ -0,0 +1,48 @@ +## Rememberance +* Tell me, I forget, teache me, I may remember, *involve* me and I learn +* *SRS, Flashcards, Testable Notes* +* Reading technical book for awareness and mastery are different things +* Use Feynmen technique to remember +* Taking hand written notes helps, notes acts like "Chronological memory jog" +* search::study + hacks on hackernews +* Summarize each chapter in your own words within a single page +---- +# Memory factors +* Do you observe or see them? +* How keen your observation matters most to memory? +* Have you associated +---- +Why Association works? 
+* Spelling can be remembered using association + * Piece of a pie + * Never believe a lie +---- +# Link method + * Helps to remember a list of items + * Association should be ridiculous or illogical + * Should see the picture of the word in our mind, don't see the word +---- +* Example link for ridiculous & illogical - Bottle & paper association (X and Y) + * Reading a giant bottle instead of paper - use x like y + * Writing on a gigantic bottle instead of paper - use y like x + * Bottle pouring paper instead of water - y producing x or x producing y + * Bottle made of paper instead of glass - x made of y or y made of x +---- +## To make illogicals +* Gigantic or too small - Out of proportion +* With Action instead of idle + * Violent and Embarrassing rather than pleasant (Humans remember violent and embarrassing things longer) +* Exaggerate the number of items (in millions) +* Picturing one item instead of the other (bottle made of paper) +---- +## Peg method + +---- +## Memory Quotes +* True art of memory is attention +---- +# Reference +* (Ask HN: How do you remember what you read?) +* [How to develop super power memory](https://archive.org/details/HowToDevelopASUPERPOWERMEMORYHarryLorayne/page/n7) +* [How To Develop A Super Power Memory](http://www.ownways.com/how_to_develop_a_super_power_memory/Contents.html) +* [Super memory it can be yours](https://issuu.com/snehalwankhede/docs/super-memory-it-can-be-yours-) \ No newline at end of file diff --git a/src/main/md/Mind/MindTools.md b/src/main/md/Mind/MindTools.md new file mode 100644 index 00000000..9cade68c --- /dev/null +++ b/src/main/md/Mind/MindTools.md @@ -0,0 +1,9 @@ +* Auto-suggestive state +* Autogenic_training +* Plant Psychology + +* We are what we repeatedly do. Excellence, then, is not an act, but a habit. + Aristotle +### [Autobiography of a yogi](https://www.ananda.org/autobiography/) + + diff --git a/src/main/md/Mind/SecuretOfConcentraionAndSelfControl.md b/src/main/md/Mind/SecuretOfConcentraionAndSelfControl.md new file mode 100644 index 00000000..7fbec2ee --- /dev/null +++ b/src/main/md/Mind/SecuretOfConcentraionAndSelfControl.md @@ -0,0 +1,67 @@ +## [What is the real meaning of Concentration and Self Control ? | Swami Sarvapriyananda](https://www.youtube.com/watch?v=_TMLjeJjE2Y) + +## Self Control & Concentration +* Philosophy of education + * Five questions + * What is the aim or purpose of education? + * Character building + * Transmission of culture + * Enable a person to function in society + * What is to be taught to students? + * Teach certain skills + * Music/Art + * What is the method of education? + * What is the role of the teacher? + * What form of discipline should be adopted? + * Many don't discuss it. + +# According to Vivekananda +* What is the goal of education? + * Training in Self control + * Concentration of mind +* What is the special method of education? + * The special method is Concentration + * The importance of education is concentration of Mind, not collecting facts + * The difference between an ordinary and an extraordinary person lies in the degree of concentration + * Ability to focus and hold on to it till it gets completed + * How to improve concentration? + * Read the RAPT book + * Read the Flow book + * Cultivate the habit of paying fullest attention to what you are doing + * Train the mind to concentrate, by keep concentrating + * If we let our mind wander, it would wander always + +# Know from many people's failure. +* We won't complete many things we started. 
+* We start to read many books, will give-up +* Create large time table, but they fail to keep it up + +# Teach will power to attain the goal + * Ability to delay gratification + * EQ is actually dealy the gratification + * Ability to sacrifice for future benefits + * Being less impulsive + * Self regulation, self will +# Concentration + * Ability to read the Encyclopedia + +* Does will power and concentration reduce Joy in the present for the benefit of Future? + * Ans: As per "Vivekananda", + * Greater is the happiness as this will is more successfully manifest + * **This very ability that I can control my thought, that I can control my activities, behavior and speech gives happiness* + * **This very power over my life that I can control it that gives happiness** + * Control the mind every moment, and be blissful always + + +* Marshmallow self control test + * Kids who doesn't learn if=>then relationship, waiting would give more benefit, But they failed even in schools + * Kids who couldn't control at the age of 4 were not changed by 14 years of future education. + * Kids waited for 2 marsh-mellow rather eating 1 marsh-mellow right now + * Some eat even before it was distributed + * Students waited flourished a lot in future life, students unable to wait didn't succeed in life + +## Reference +* [Walter Mischel, Philip Zimbardo-2008][Psycology and Life](https://www.youtube.com/watch?v=y7t-HxuI17Y) +* [Professor Mahan Maharaj](https://www.youtube.com/watch?v=Zz0QaXERInw) +* [Rapt: Attention and the Focused Life](https://www.amazon.com/Rapt-Attention-Focused-Winifred-Gallagher/dp/0143116908) +* [Flow: The Psychology of Optimal Experience (Harper Perennial Modern Classics)](https://www.amazon.com/Flow-Psychology-Experience-Perennial-Classics/dp/0061339202/ref=pd_bxgy_14_img_2?_encoding=UTF8&pd_rd_i=0061339202&pd_rd_r=ZEW1C07646SQ148YZJDC&pd_rd_w=2k9d7&pd_rd_wg=NxHq4&psc=1&refRID=ZEW1C07646SQ148YZJDC) \ No newline at end of file diff --git a/src/main/md/Mind/TamilPhilosohpies.md b/src/main/md/Mind/TamilPhilosohpies.md new file mode 100644 index 00000000..33ae8630 --- /dev/null +++ b/src/main/md/Mind/TamilPhilosohpies.md @@ -0,0 +1,10 @@ +* [சித்தர் பாடல்கள்](https://www.projectmadurai.org/pmworks.html) +* [மகாவாக்கியங்கள்](https://www.sivathondan.org/%e0%ae%ae%e0%ae%95%e0%ae%be%e0%ae%b5%e0%ae%be%e0%ae%95%e0%af%8d%e0%ae%95%e0%ae%bf%e0%ae%af%e0%ae%99%e0%af%8d%e0%ae%95%e0%ae%b3%e0%af%8d/) +* [Tirumantiram - திரு மந்திரம்](http://www.tamilvu.org/library/l4100/html/l41A0ind.htm) +* [ சித்தர் பாடல்கள்: சிவவாக்கியம்](https://www.projectmadurai.org/pm_etexts/pdf/pm0269.pdf) +* Thiruvasagam - திருவாசகம் - c. 862 C.E. – 885 C.E - Manikkavacakar +* Tirumantiram - திருவாசகம் - c. 880 C.E. – 925 C.E - Thirumular + +# [ சித்தர் பாடல்கள்: சிவவாக்கியம்](https://www.projectmadurai.org/pm_etexts/pdf/pm0269.pdf) + + diff --git a/src/main/md/Mind/WhatToTeachtoKids.md b/src/main/md/Mind/WhatToTeachtoKids.md new file mode 100644 index 00000000..58765417 --- /dev/null +++ b/src/main/md/Mind/WhatToTeachtoKids.md @@ -0,0 +1,5 @@ +# Good Drawing Skill + +* +* 30@Akin'sLaw - If you want to have a maximum effect on the design of a new engineering system, learn to draw. Engineers always wind up designing the vehicle to look like the initial artist's concept. 
+* \ No newline at end of file diff --git a/src/main/md/Mind/WonderfulThoughts.md b/src/main/md/Mind/WonderfulThoughts.md new file mode 100644 index 00000000..e699b42a --- /dev/null +++ b/src/main/md/Mind/WonderfulThoughts.md @@ -0,0 +1,34 @@ +# [What I learnt from Sports](India Vs Bangladesh T20 Nidas Trophy Final 2018|Last Over Full Thriller Match|https://www.youtube.com/watch?v=njqnq3d6rz8) + * Ref: India Vs Pakistan T20 world cup 2007 super over win the match + * I watch plenty of cricket, but what recently observed is successful sportsperson should have two qualities + * Skill + Keeping Cool + * With only skill, Pakistan bowlers would have won the game. They are known as best skill-full bowlers + * With only skill, DK would have been successful in many other occasions, but he also increase skill + attitude + * Similarly Dhoni is known as wonderful cricketer, but definitely not the best among top batters, but he achieved by keeping himself cool on many occasion + * Similarly Zidaine would have won the game for France, but he lost his coolnees, due to famous headbut he failed the team. But skill-wise he is legend + * How to keep ourself calm regardless of situation is the key + * We should practice being calm + * Daily Meditation, Keeping us fit and Breathe exercise + * Let go of any grudges and judgments, don't take it personally, don't judge that moment + * Look at the situation as it truly is. If it is nervous for us, it could be nervous for others who create such a situation. Think from others perspective. Find the unseen gifts in the situation. + * Determine your desired outcome, but on work and task don't think about it often, rather focus on process. + * Remove yourself from the situation + * Positive Self-Talk, self suggestion + * Additional observation from my friend + * If Pakistan would have started bowling, India might have lost, sometime opportunity luck matters + * Not trusting their regular style of bowling is crazy, when under pressure use your time tested technique + * Never forget to practice the basics (here basic bowling to hit stick was forgotten by experienced bowler) + * Little bit of careless attitude helps when under pressure + * Being in similar situation using simulation or past experience or anticipated practice would helps + + +# [Sadhguru & Zakir Nair : Views on Religion](https://www.youtube.com/watch?v=mzJVnPJ3LyI) +* Today religion means conflict + * Because we reduced religion into set of beliefs +* If you want to take inward yourself, you have to transform yourself +* if you want to believe something, you don't want to transform yourself +* The moment you believe something, you will have tremendous confident +* An intelligent person is constantly hesitating with life with every step, He wonders whether what he's doing is okay or not. But a fool has absolute confidence +* It the very nature of your intelligence is such that if you do something's too stupid today tonight your intelligence will bother you why did I do this +* if you get God's stamp on your stupidity you don't have to turn back and see. 
You can do the grossest things on the planet and feel very proud that you're anyway going to go to heaven +* Two beliefs will always have conflict, hence religion becomes a problem diff --git a/src/main/md/Mind/active_listening.md b/src/main/md/Mind/active_listening.md new file mode 100644 index 00000000..adc153a1 --- /dev/null +++ b/src/main/md/Mind/active_listening.md @@ -0,0 +1,26 @@ +**Smart people talk while the wise listen** + +* Active Listen = To Understand + To Feel + To Perceive + To Sense * +* True listening requires setting aside oneself. (M Scott Peck) + +1. Hear what people are really saying +2. Hear not only the words, listen to their body language +3. Try to understand the whole message + +**How to stop our mind from drifting and reinforce the message** +1. Repeat in your mind the words the speaker says +2. Choose to listen and avoid Judgement +3. Always allow the speaker to finish + +**Pay attention** +* Sit front and center +* Always keep your body language energetic (worst case fake it) +* Listener's body language can control listener's mind +* Avoid being distracted by environmental factors +* Nod occasionally and smile +* Encourage the speaker with expressions like Yes, oh, wow +* Paraphrase using "What I am hearing is.." + +**Respond** +* Be candid, honest and open +* Put yourself in the speaker's shoes diff --git a/src/main/md/Mind/effective_conversation.md b/src/main/md/Mind/effective_conversation.md new file mode 100644 index 00000000..bd4c0a67 --- /dev/null +++ b/src/main/md/Mind/effective_conversation.md @@ -0,0 +1,27 @@ +* Effective conversation requires the right balance between talking and listening +* You should communicate with both people you like and people you don't like +* We should walk away feeling engaged and inspired +* 10 ways to have a better conversation + * 1. Don't multitask, Be in that moment. Focus on that Conversation. + * 2. Don't Pontificate. Don't state your hard opinion unless it carries on the conversation. + * (Don't express +your opinions in a pompous and dogmatic way.)(write a blog for pontification) + * 3. Use open ended questions + * Enter every conversation assuming you have something to learn and use who, what, when, where and why. Let the other one describe the situation + * If you put in a complicated question, you're going to get a simple answer out + * What was it like? How do you feel? + * 4. Go with the flow, Don't stop listening. Let thoughts ebb and flow, continue focusing on the conversation. + * 5. If you don't know, say that you don't know. Don't pretend to be an expert. + * 6. Don't assume your experiences are their experiences. All experiences are unique. + * Their experience may be more terrible + * Conversations are not a commercial opportunity, don't promote + * 7. Don't repeat yourself. Try not to repeat yourself. + * 8. Details don't matter, leave them out. Stay out of the weeds (don't worry about dates and names) + * 9. Listen. Resist the urge to talk, and avoid being distracted by random thoughts. + * If your mouth is open, you are not listening. + * No man ever listened his way out of a job + * 10. 
Be Brief + * A good conversation is like a miniskirt; short enough to retain interest, but long enough to cover the subject + +## References +* 10 ways to have a better conversation diff --git a/src/main/md/Mind/habit_formation/HabitGroup.md b/src/main/md/Mind/habit_formation/HabitGroup.md new file mode 100644 index 00000000..2afe63b9 --- /dev/null +++ b/src/main/md/Mind/habit_formation/HabitGroup.md @@ -0,0 +1,44 @@ +# Overview +* We are a bunch of people "to form a new habit", "to quit" and learn about habit itself and obviously help each others and ourselves. We can choose one of the book like "Atomic Habits by James Clear", "Mini Habits: Smaller Habits, Bigger Results by Stephen Guise" or "Tiny Habits - BJ Fogg" + +## Todo to be in the group +* Your committed to form a habit +* Practice at-least 2 minutes within a day +* Peform the same set of things, so that you could compare and improve +* Post your daily update with day-count, Your update is motivation for others, so please update. +* Appreciate others or chase others or kick others out + +## Tips to form habit +* We are what we repeatedly do. Excellence, then, is not an act, but a habit. +* The amount of time you have been performing a habit is not as important as the number of times you have been formed it +* Whatever the habit, try to break it into 2 minutes task and link with existing habit +* To quit habit, try to replace with completely different habit at difference place. Observe existing cue and environments that triggers bad habit, and avoid them + +## Guidelines +* Don't quote conversations from this group to outside, avoid lengthy personal conversations. +* Anyone who joins, can learn about us in first 10 days, and should commit something in next 7 days. And they should post their update everyday. + * When we post our update, others would get reminded about their habits, In turn it helps others. It is not about boasting! +* If someone doesn't form any habit and not updating others, anyone can kick them out after first 17 days. But please drop them a gentle reminder and note before kicking them out! +* Use Habit calendar to track and don't break the chain + +# Sample committment +* Right after brusing, I would do 200 skipping +* Right after skipping, I would do 15 push-ups +* Right after push-ups, I would drink 3 glass of water +* Right after drinking, I would read financial news-paper +* Right after reading paper, I would practice singing for 2 minutes +* Right before hitting bed, I would read current affairs in French language +* Right before hitting bed, I would read book and improve vocabulary +* Right after using washroom, I would do plank for 2 minutes + +# Sign of not-appropriate committments +* Right after brushing, I would complete a chapter from machine learning book (doesn't fit within 2 minute) +* Can you minimize your habit to 2 minute? (Example: I am a vegetarian for whole week) +* I would learn python everyday (what is minified version of your commitment?) 
+ +## References +* https://jamesclear.com/habit-stacking +* https://jamesclear.com/stop-procrastinating-seinfeld-strategy + +# Other Books +* [Habit Stacking: 127 Small Changes to Improve Your Health, Wealth, and Happiness](https://www.amazon.com/gp/product/B06XP2B5QC/ref=dbs_a_def_rwt_hsch_vapi_tkin_p1_i4) diff --git a/src/main/md/Mind/habit_formation/HabitGroup.pdf b/src/main/md/Mind/habit_formation/HabitGroup.pdf new file mode 100644 index 00000000..b6749b33 Binary files /dev/null and b/src/main/md/Mind/habit_formation/HabitGroup.pdf differ diff --git a/src/main/md/Mind/habit_formation/If your committed to form a habit.md b/src/main/md/Mind/habit_formation/If your committed to form a habit.md new file mode 100644 index 00000000..b9a1b946 --- /dev/null +++ b/src/main/md/Mind/habit_formation/If your committed to form a habit.md @@ -0,0 +1,35 @@ +# If your committed to form a habit + +* Practice at-least 2 minutes within a day +* Peform the same set of things, so that you could compare and improve +* Post your daily update with, Your update is motivation for others, so please update. +* Appreciate others or chase others or kick others out + +# Tips to form habit + +* We are what we repeatedly do. Excellence, then, is not an act, but a habit. +* The amount of time you have been performing a habit is not as important as the number of times you have been formed it +* Whatever the habit, try to break it into 2 minutes task and link with existing habit + + +# Sample committment + +* Right after brusing, I would do 2 pushups +* Right after using washroom, I would do plank for 2 minutes +* Right before hitting bed, I would read current affairs in French language +* Right before hitting bed, I would read some book and add one more word to my vocabulary + + +# Template to form habit.. + +Right after abc.., I will do-abc +Right after anchor-behaviour.., I will do new-tiny-habit + +# Why Tiny habit? + +* Tiny habit can start now +* Tiny is fast +* Tiny is safe +* Tiny can grow big +* Tiny doesn't rely on motivation or will-power + diff --git a/src/main/md/Mind/habit_formation/atomic_habits.md b/src/main/md/Mind/habit_formation/atomic_habits.md new file mode 100644 index 00000000..7371becc --- /dev/null +++ b/src/main/md/Mind/habit_formation/atomic_habits.md @@ -0,0 +1,98 @@ +# Atomic Habits +* Spending atleast 2 minutes for something that we like to improve +* Aggregation of marginal gains + +# Why Habits? +* Good Habits create freedom. + * Good financial habits vs Bad financial habits + * Good health habits vs Bad health habits + * Good gtd habits vs Bad gtd habits + +# How Habits formed? +* Cue - Different Cue for different person +* Crave - Motivational force +* Response - Habit +* Reward - Satisfy/Teachus +* Cue|Crave|Response|Reward +* (Cue|Crave=Problem Phase)|(Response|Reward=Solution Phase) +* bored|need_to_do_be_entertained|watch_netflix|enjoy_entertainment +* We associate problem phase with solution +* Cue - Difficult office task, crave for relief, response - check twitter, reward - relieved (if office issue, check twitter) + +# Cue +* Unconscious cues? +* Conscious Cues? 
+ +# Crave +* What we crave for is the state change (not habit itself) +* Craving for feeling of relief than Cigarette +* Craving for entertainment than Television +* Craving for clean month than brushing + +# Why habit won't be formed +* Lack of cue, habit will not start +* Reduce the craving, reduce the motivation +* Make habit difficult such that we can't do that, we won't even start +* Rewards fails to satisfy, then habit becomes useless + +# To Create Habit +* Cue: Make it obvious (clear) +* Craving: Make it attractive +* Response: Make it easy +* Reward: Make it satisfying + +# Habit formation +* Prepare Habit table +* Point and call (don't just think) + * After 3:00PM, I would book flight ticket (loud and clear) +* Specify DTP, (date, time and place) to perform a habit + * Everyday 5:55PM, I would double check the office todo +* Habit Stacking - Diderot Effect + * One spending leads to another, one purchase leads to another, One habit can be stacked with another + * Right after brushing my teeth, I would weigh my weight, and do 10 push ups at the hall. + +# To find a bad Habit +* List all your habits +* Score +/-/= +* Say loud before every habit and its consequences + +# To break a bad Habit +* Cue: Make it invisible (clear) +* Craving: Make it un-attractive +* Response: Make it hard +* Reward: Make it unsatisfying + +# Pointing and Calling +* Use eyes, mouth, ear and hands +* "I have got my keys, wallet, phone, train-pass and ear-phone" + +# Quotes +* Until you make unconscious conscious, it will direct your life and you call it fate - Carl Jung +* Plant a tiny seed in right spot, and it will grow without coaxing +* B=F(P,E) = Behaviour is function of person in an environment +* B=MATHS = Behaviour = Motivation + Ability + Trigger + Habit + Success + +# Useful tiny habits +* Use behaviour stack, behave, Dance/reward/Bingo/IAmSUPER +* After I pee, I do 2 push-ups +* BehaviourGrid: ![alt text][BehaviourGrid] +* After I drink a glass of water, I will do 10-15 body-weight squats. +* After I pour a cup of tea, I will do a 30-second plank. +* After I go the restroom, I will do four or five push-ups. +* After I walk in my office, I will do 10-15 reverse dumbbell flies. +* After I walk in my apartment, I will do 10-15 lateral dumbbell raises. +* After I watch video, I will leave a comment + +# If habits are not sticking +* Do we really need it? Do we really love that skill/output? +* Do we despararately need them? +* Are there anything more important than what we try? 
+ + +# Reference +* [BJ Fogg - Tiny habits - Stanford Behavior Design Lab](https://www.bjfogg.com/) +* [Habits - The Definitive Guide to Lasting Change](http://www.selfication.com/) +* [Habit Stacking: 97 Small Life Changes That Take Five Minutes or Less](https://www.amazon.com/gp/product/B00JQHB67O/ref=dbs_a_def_rwt_hsch_vapi_tkin_p2_i3) +* [S J Scott](https://www.developgoodhabits.com/about-s-j-scott/) + +[BehaviourGrid]: img/bjfogg.png "BehaviourGrid" \ No newline at end of file diff --git a/src/main/md/Mind/habit_formation/atomic_habits.pdf b/src/main/md/Mind/habit_formation/atomic_habits.pdf new file mode 100644 index 00000000..b6ed9773 Binary files /dev/null and b/src/main/md/Mind/habit_formation/atomic_habits.pdf differ diff --git a/src/main/md/Mind/habit_formation/habit.md b/src/main/md/Mind/habit_formation/habit.md new file mode 100644 index 00000000..52bf49ec --- /dev/null +++ b/src/main/md/Mind/habit_formation/habit.md @@ -0,0 +1,25 @@ +## Habits +* Knowing it, doing it is two different things +* Once you decided to stop, you would stop + * Don't say last time, till next week +* Don't try to make the big change all at once +* 1% better or 1% worst is not going to impact everyday, but as time goes on this 1% small improvements or declines compound +* This 1% compound effect make huge impact on people compared with rest of the people +* Good habits are not enough, it takes only one/two small habits to hold you back. +* The only person you can compare with you is the person you were yesterday +* We can change *our world* with one step at a time +* Sometime we think so small, self-pity, envious - but that way of thinking seems small in the moment, In fact it might even make you feel better in the moment, but that way of thinking is eating away at your mental strength +* I have to work late - You give away the power +* Xyz drives me crazy - "You give away the control" +* Unhealthy beliefs about the world - "If I put in hard work, success should fall on my lap" + +## Give up your bad mental habits. + * Mental strength is a lot like physical strength if you wanted to be physically strong, you need to go to the gym and lift weights but if you really wanted to see results you'd also have to give up eating junk food + * Mental strength is the same if you want to be mentally strong you need good habits like practicing gratitude. + * But you also have to give up bad habits like resenting somebody else's success no matter how often that happens it'll hold you back. + * Counter unhealthy mental habits with healthier habits + +* [The Power of Forming Habits | David Nevins | TEDxCushingAcademy](https://www.youtube.com/watch?v=iUKwFuV6FaA) +* [The Secret of Becoming Mentally Strong](https://www.youtube.com/watch?v=TFbv757kup4) + + \ No newline at end of file diff --git a/src/main/md/Mind/habit_formation/img/bjfogg.png b/src/main/md/Mind/habit_formation/img/bjfogg.png new file mode 100644 index 00000000..bf461540 Binary files /dev/null and b/src/main/md/Mind/habit_formation/img/bjfogg.png differ diff --git a/src/main/md/Mind/habit_formation/my_tiny_automic_habits.md b/src/main/md/Mind/habit_formation/my_tiny_automic_habits.md new file mode 100644 index 00000000..c236d7c6 --- /dev/null +++ b/src/main/md/Mind/habit_formation/my_tiny_automic_habits.md @@ -0,0 +1,6 @@ +## My Tiny Habits +* Skip 200 count right after the bed. First Thing Skip. [FTS] +* After entering home, should read two pages of book. To make environment better, keep a book near door. 
[EHR] +* After using wash room, should do 10 push-ups. [WRP] +* Before unlocking home computer, read a page from "Daily Divine Digest/Thinasari Dhyanam" - [UCR] +* Just before hitting bed, should read two pages worth of "Securities Domain" content - [RPB] \ No newline at end of file diff --git a/src/main/md/Mind/mind_hacks.md b/src/main/md/Mind/mind_hacks.md new file mode 100644 index 00000000..0f0c173c --- /dev/null +++ b/src/main/md/Mind/mind_hacks.md @@ -0,0 +1,10 @@ +* Artificial deadlines to take decisions + * To put an end to the decision making process, he sets a deadline for the decision to be made. Say 6pm on Monday. At five minutes to 6 he usually doesn't know the answer, but in those 5 minutes something clicks, and by 6pm the answer is always there. +* 10/10/10 rule + * How will he feel about the outcome 10 minutes from now? How about 10 months from now? How about 10 years from now? +* Doesn't matter after 'X time' + * If there's an important meeting with stakeholders, a scary appointment with the doctor or a tough chat with an employee - Simply keep in mind the fact that by "X time", the thing will have passed and won't matter anymore. If it doesn't matter after X time, chances are it probably doesn't matter now. + + +## References +* [Ask HN: What are your “brain hacks” that help you manage everyday situations?](https://news.ycombinator.com/item?id=18588727) \ No newline at end of file diff --git a/src/main/md/Mind/motivational_quotes_videos.md b/src/main/md/Mind/motivational_quotes_videos.md new file mode 100644 index 00000000..e21d58f7 --- /dev/null +++ b/src/main/md/Mind/motivational_quotes_videos.md @@ -0,0 +1,54 @@ +# About me +* My enthusiasm far exceeds my talent! + +# Going fast +* If you want to go fast, go alone; if you want to go far, go together. +* If everything seems under control, you're just not going fast enough. +* "Move fast and break things. Unless you are breaking stuff, you are not moving fast enough." +* Sometimes you need to slow down to go fast +* If you wish to travel Far and Fast, travel light +* Innovation is moving at a scarily fast pace +* Fail fast, learn fast and improve fast +* + +# Grow up +* “It is only because of problems that we grow mentally and spiritually.” + + +# Differences +* “Share our similarities, celebrate our differences.” + + + +# Motivational videos +* [Why it Pays to Be Hungry | Les Brown | Goalcast](https://youtu.be/xFr0FKnaLDk) + +# Motivational blogs +* [Do nothing that is of no use.](https://rubikscode.net/2018/04/23/how-to-use-miyamoto-musashis-philosophy-to-become-better-software-crafter/) + +# Motivational quotes +* A life is about putting others' needs above your own fears. +* Do nothing that is of no use. +* Continuous improvement of yourself should be one of the most important things in your life. There is always something new that you need to learn +* Everyone you will ever meet knows something you don't + +# Fight and War quotes +* If you're not humble, life will visit humbleness upon you. - Mike Tyson +* You can only fight the way you practice. + +## From Autobiography of a Yogi +* Sri Yukteswar's public speeches emphasized the value of Kriya Yoga, and a life of self-respect, calmness, determination, simple diet, and regular exercise. +Look fear in the face and it will cease to trouble you. - Sri Yukteswar +Good and positive suggestions should instruct the sensitive ears of children. 
Their early ideas long remain sharply etched.” +He is a fool that cannot conceal his wisdom +Disbelieve in the reality of sickness even when you are ill; an unrecognized visitor will flee! - Sri Yukteswar +Good manners without sincerity are like a beautiful dead lady - Sri Yukteswar +“Straightforwardness without civility is like a surgeon’s knife, effective but unpleasant. Candor with courtesy is helpful and admirable.” - Sri Yukteswar +Inwardly humble and outwardly unbendable - Sri Yukteswar +“Softer than the flower, where kindness is concerned; stronger than the thunder, where principles are at stake.” - Sri Yukteswar +As soon as the devotee is willing to go even to the ends of the earth for spiritual enlightenment, his guru appears near-by. +By a number of means—by prayer, by will power, by yoga meditation, by consultation with saints, by use of astrological bangles—the adverse effects of past wrongs can be minimized or nullified. + +## Advices +* “Destroy wrong desires now; otherwise they will follow you after the astral body is torn from its physical casing. Even when the flesh is weak, the mind should be constantly resistant. If temptation assails you with cruel force, overcome it by impersonal analysis and indomitable will. Every natural passion can be mastered." +* \ No newline at end of file diff --git a/src/main/md/Mind/my_guitar.md b/src/main/md/Mind/my_guitar.md new file mode 100644 index 00000000..0602c5e6 --- /dev/null +++ b/src/main/md/Mind/my_guitar.md @@ -0,0 +1,8 @@ +# Guitor lesson +* https://www.youtube.com/watch?v=4EVT2VNMcpA&list=PL-RYb_OMw7GfG6MS0WBO1v2qvtomUkZci + +# Music Theory +* [Music Theory Basics for Guitar: Lesson 1 - The Musical Alphabet](https://www.youtube.com/watch?v=Mj36tEcalBs) +* [Music Theory Basics for Guitar: Lesson 2 - The Major Scale](https://www.youtube.com/watch?v=Iw-06hLOjyU) +* [Music Theory Basics for Guitar: Lesson 3 - Circle of 4ths](https://www.youtube.com/watch?v=ezzuTObED2o) +* [8 Facts About the Circle of Fifths that you May Not Already Know]-(https://www.youtube.com/watch?v=50CpDZvTWks) \ No newline at end of file diff --git a/src/main/md/Mind/problems.md b/src/main/md/Mind/problems.md new file mode 100644 index 00000000..1209fae5 --- /dev/null +++ b/src/main/md/Mind/problems.md @@ -0,0 +1,6 @@ +* Restate +* Rephrase +* Rejoin +* Revisit +* Rebalance +* Reorganize diff --git a/src/main/md/success.md b/src/main/md/Mind/success.md similarity index 100% rename from src/main/md/success.md rename to src/main/md/Mind/success.md diff --git a/src/main/md/Mind/tiny_habit_list.md b/src/main/md/Mind/tiny_habit_list.md new file mode 100644 index 00000000..3d2aa11d --- /dev/null +++ b/src/main/md/Mind/tiny_habit_list.md @@ -0,0 +1,23 @@ +* Running for 2 minutes +* Jumping for 2 minutes +* Reading for 2 minutes +* Walking 10 steps backward +* Conscious deep breathing for 2 minutes +* Washing your face with cold water in the morning! +* Squatting for 2 minutes +* Drinking water right when you wake up +* Writing todo list in the morning +* Making your bed +* Cleaning/washing up right after dinner +* Organising clothes and packing bag the night before +* Polishing your shoes +* Ironing clothes +* Smile/laugh for 2 minutes +* Deleting Social Media +* Drink two glasses of water right after I stand up in the morning +* Brushing before hitting bed +* At the end of the day, trying mentally naming 3 things you where grateful for that day +* Read a self help paragraph before going to bed at night. 
+* sleep without pillow for 2 minutes - https://drhealthbenefits.com/lifestyle/healthy/healthy-habits/sleeping-without-a-pillow-benefits +* Self-hypnosis every day has helped me change my mental habits +* Arranging something in home for 2 minutes \ No newline at end of file diff --git a/src/main/md/MinimumJavascript.md b/src/main/md/MinimumJavascript.md new file mode 100644 index 00000000..c1b1dbb4 --- /dev/null +++ b/src/main/md/MinimumJavascript.md @@ -0,0 +1,33 @@ +# The Minimum Javascript you should know when you code reactjs & redux + +* '10' + 20 ~~> 1020 +* 'truthy' is if non boolean value converts to true +* 'falsy' is if non boolean value converts to false +* !! 'double exclamation' can be used to find if a value is truthy or falsy + * !!{} + * !!0 + * !![] +* Short circuit operator + * console.log(test && test.toPrint) +* * + + +# feedbacks +* @10,@11 rather naming test and test2, name the function as nullPrinter and stuffPrinter +* showing whereto use short-circuit in reactjs is ++ +* @14, Object.freeze and enum pattern would have been better. +* @17, it should be "VM231:7 Uncaught ReferenceError: c is not defined" +* R.partialRight was wow! never used +* @24 there is a typo, it should be x instead of X, and reduce may not require explicit 0. (but explici passing considered good!) +* @26 was it poop, shit! nice pun! +* @26 - there are two the, sounds wrong to return the just the names of the".. +* once you encourage let, why do you fallback to var in 26 +* @35 - iPhone-7 has 2gb ram instead of 3gb +* @43 - `latest` addition? will it make sense 3 years from now?, I have been using more than 2 years. +* @48 - What is commonJs? No background... +* Just a note: Ommitting "js" while importing considered bad design. +* @51 - file1.js and file2.js, common Tomy, you could do cooler :) +* @57 - that quiz question was not clear. + +# Refrences +* [Tomy's book](https://www.amazon.com/Minimum-JavaScript-Should-React-Redux/dp/1718043376?keywords=Minimum+Javascript&qid=1539595555&sr=8-1-fkmrnull&ref=sr_1_fkmrnull_1) diff --git a/src/main/md/MoviestoWatch.md b/src/main/md/MoviestoWatch.md new file mode 100644 index 00000000..df6fb4e4 --- /dev/null +++ b/src/main/md/MoviestoWatch.md @@ -0,0 +1 @@ +* Mercy Mission: the Rescue of Flight 771 \ No newline at end of file diff --git a/src/main/md/MyTasks/Aswath_Rithvik.md b/src/main/md/MyTasks/Aswath_Rithvik.md new file mode 100644 index 00000000..295d3a75 --- /dev/null +++ b/src/main/md/MyTasks/Aswath_Rithvik.md @@ -0,0 +1,40 @@ +* [Aswath tracking](https://www.khanacademy.org/math/in-eighth-grade-math) +* [Rithvik tracking](https://www.khanacademy.org/math/in-seventh-grade-math) + +# Rules +* Read your questions properly, if you read question wrong, your answer will be always wrong +* Loose concentration, become a fool + * Use 1-2-3 to gain concentration, Close your eyes, deep breath 5 times, think only your breath + +# Number System +# Rational Numbers: +* Properties of rational numbers. (including identities). Using general form of expression to describe properties +* Consolidation of operations on rational numbers. +* Representation of rational numbers on the number line +* Between any two rational numbers there lies another rational number (Making children see that if we take two rational numbers then unlike for whole numbers, in this case you can keep finding more and more numbers that lie between them.) +* Word problem (higher logic two operations, including ideas like area) + +# Powers +* Integers as exponents. 
+* Laws of exponents with integral powers +* Squares, Square roots, Cubes, Cube roots +* Square roots using factor method and division method for numbers containing (a) no more than total 4 digits nd (b) no more than 2 decimal places +* Cubes and cubes roots (only factor method for numbers containing at most 3 digits) +* Estimating square roots and cube roots. Learning the process of moving nearer to the required number. (iv) Playing with numbers +* Writing and understanding a 2 and 3 digit number in generalized form (100a + 10b + c , where a, b, c can be only digit 0-9) and engaging with various puzzles concerning this. (Like finding the missing numerals represented by alphabets in sums involving any of the four operations.) Children to solve and create problems and puzzles. +* Number puzzles and games +* Deducing the divisibility test rules of 2, 3, 5, 9, 10 for a two or three-digit number expressed in the general form. + + +# Algebra +* Algebraic Expressions +* Multiplication and division of algebraic exp.(Coefficient should be integers) +* Some common errors (e.g. 2 + x <> 2x, 7x + y . 7xy ) +* Identities (a ± b)2 = a2 ± 2ab + b2, )2 a2 – b2 = (a – b) (a + b) Factorisation (simple cases only) as examples the following types a(x + y), (x ± y)2, a2 – b2, (x + a).(x + b) Solving linear equations in one variable in contextual problems involving multiplication and division (word problems) (avoid complex coefficient in the equations) + +# Ratio and Proportion +* Slightly advanced problems involving applications on percentages, profit & loss, overhead expenses, Discount, tax. +* Difference between simple and compound interest (compounded yearly up to 3 years or half-yearly up to 3 steps only), Arriving at the formula for compound interest through patterns and using it for simple problems. +* Direct variation – Simple and direct word problems +* Inverse variation – Simple and direct word problems +* Time & work problems– Simple and direct word problems diff --git a/src/main/md/MyTasks/todo_project_ideas.md b/src/main/md/MyTasks/todo_project_ideas.md new file mode 100644 index 00000000..8a0271a7 --- /dev/null +++ b/src/main/md/MyTasks/todo_project_ideas.md @@ -0,0 +1,5 @@ +## RestFul URL and content search engine + a. Objective: Many restful url has plenty of data, but we can't search using every different url + b. Admin page, list all existing registered url, accept new restful-url + name + jpath expression to find presence of result + c. Search page, accept the content and search in all the url, display the content + d. pay attention to "bootstrap.zero" \ No newline at end of file diff --git a/src/main/md/NLP/StrongWords_CheckList.md b/src/main/md/NLP/StrongWords_CheckList.md new file mode 100644 index 00000000..68b98d6f --- /dev/null +++ b/src/main/md/NLP/StrongWords_CheckList.md @@ -0,0 +1,5 @@ +* How to extract validation/checklist statement from articles? 
+ +## *Strong Words* +* Should +* Must diff --git a/src/main/md/Notes/self_selection.md b/src/main/md/Notes/self_selection.md new file mode 100644 index 00000000..2b9d5a2e --- /dev/null +++ b/src/main/md/Notes/self_selection.md @@ -0,0 +1,9 @@ +# http://nomad8.com/self-selection-blog/ +* https://pragprog.com/book/mmteams/creating-great-teams +* https://nomad8.com/how-we-measure-work-happiness/ +* https://nomad8.com/the-self-organising-organisation/ +* https://nomad8.com/total-squadification-large-scale-self-organisation/ +* https://nomad8.com/creating-great-teams-how-self-selection-lets-people-succeed/ +* https://nomad8.com/setting-up-boundaries-to-create-great-self-organising-teams/ +* Tribe - https://labs.spotify.com/2014/03/27/spotify-engineering-culture-part-1/ +* [Squad, Chapter, Tribe](http://www.full-stackagile.com/2016/02/14/team-organisation-squads-chapters-tribes-and-guilds/) \ No newline at end of file diff --git a/src/main/md/People/Awesome People.md b/src/main/md/People/Awesome People.md new file mode 100644 index 00000000..423a5d9f --- /dev/null +++ b/src/main/md/People/Awesome People.md @@ -0,0 +1,9 @@ +* [Herbert A Simon](https://en.wikipedia.org/wiki/Herbert_A._Simon) + * One who won noble and Turnig +* [William "Velvel" Morton Kahan] (https://rosettacode.org/wiki/Kahan_summation) + * Turing Award Winner + * Found the famous pentium bug +* [Claude Shannon](https://en.wikipedia.org/wiki/Claude_Shannon) + * The father of information theory + * First in AI, Cicuit and Logic Gate + * [A Mathematical Theory of Communication](http://math.harvard.edu/~ctm/home/text/others/shannon/entropy/entropy.pdf) \ No newline at end of file diff --git a/src/main/md/Petrinet.md b/src/main/md/Petrinet.md new file mode 100644 index 00000000..e65e0677 --- /dev/null +++ b/src/main/md/Petrinet.md @@ -0,0 +1 @@ +* http://sujitpal.blogspot.sg/2008/03/modeling-workflow-as-petri-net.html diff --git a/src/main/md/Podcast.md b/src/main/md/Podcast.md new file mode 100644 index 00000000..8e02d54a --- /dev/null +++ b/src/main/md/Podcast.md @@ -0,0 +1,37 @@ +* How to search good podcast, seach list of people you like, and find who interviewed them those podcast should be good. 
+* People who I like and follow + * Daniel Kehman - Conversations with Tyler + * Ray Dalio - https://www.tonyrobbins.com/podcasts/success-strategies-self-made-billionaire/ + * + + + +A Brief History of Mathematics:https://podcasts.files.bbci.co.uk/b00srz5b.rss +Business Statistics - Undergraduate:https://www.rjerz.com/c/bs/Podcasts/bsu.xml +Conversations with Tyler:http://cowenconvos.libsyn.com/rss +Data Crunch | Artificial Intelligence | AI | Machine Learning | Big Data | Data Science:https://vaultanalytics.com/feed/podcast/ +Data Skeptic Bonus Feed:https://s3.amazonaws.com/data-skeptic-bonus-feed/data-skeptic-bonus.xml +Data Skeptic:http://dataskeptic.libsyn.com/rss +DevOps Radio:http://devopsradio.libsyn.com/rss +Engineering Culture by InfoQ:http://feeds.soundcloud.com/users/soundcloud:users:258266127/sounds.rss +Freakonomics Radio:https://rss.art19.com/freakonomics-radio +FT Alphachat:http://rss.acast.com/ft-alphachat +High Probability Investing Show:http://feeds.soundcloud.com/users/soundcloud:users:413567328/sounds.rss +Inspired by Math!:http://feeds.feedburner.com/InspiredByMath +Learning Machines 101:http://learningmachines101.libsyn.com/rss +Machine Learning – Software Engineering Daily:https://softwareengineeringdaily.com/category/machine-learning/feed/ +More or Less Behind the Stats:http://www.bbc.co.uk/programmes/p02nrss1/episodes/downloads.rss +Not Another Fake NewsCast:http://pgmcast.libsyn.com/rss +O'Reilly Data Show - O'Reilly Media Podcast:http://feeds.podtrac.com/IOJSwQcdEBcg +Partially Derivative:http://feeds.feedburner.com/PartiallyDerivative +Pivotal Podcasts:https://pivotal.io/podcasts/feed +Planet Money:https://www.npr.org/rss/podcast.php?id=510289 +Software Engineering Daily:http://softwareengineeringdaily.com/feed/podcast/ +Software Engineering Radio - The Podcast for Professional Software Developers:http://feeds.feedburner.com/se-radio +The Changelog:http://feeds.feedburner.com/changelogshow +The InfoQ Podcast:http://feeds.soundcloud.com/users/soundcloud:users:215740450/sounds.rss +The Math Dude Quick and Dirty Tips to Make Math Easier:https://www.quickanddirtytips.com/xml/mathdude.xml +The Tony Robbins Podcast:http://tonyrobbins.libsyn.com/rss +This Week in Machine Learning & Artificial Intelligence (AI) Podcast:http://twimlai.libsyn.com/rss +WorkLife with Adam Grant:https://feeds.feedburner.com/WorklifeWithAdamGrant +Shane Parrish Podcast - https://fs.blog/the-knowledge-project/ \ No newline at end of file diff --git a/src/main/md/Productivity/GTD.md b/src/main/md/Productivity/GTD.md new file mode 100644 index 00000000..795e45a7 --- /dev/null +++ b/src/main/md/Productivity/GTD.md @@ -0,0 +1,56 @@ +* [GTD in 15 minutes – A Pragmatic Guide to Getting Things Done](https://hamberg.no/gtd/) +* [Dave Lee - My Productivity System](http://heydave.org/post/26770221775/my-productivity-system) + +* GTD - a trusted system for collecting tasks, ideas, and projects + * When your system and your trust in your system is in place, subconsciousness will stop keeping track of all the things you need to do and stop constantly reminding you +* Convert both tasks and whims into physical and visible actions +* Task List + * In + * As as soon as task identified, should move to In list + * Any kind of task should go here + * Should be accessible to log immediately, hence you can have more than one + * First time it would take an hour to dump all kinds of task + * Flow: ![alt text][flow] + * Flow: ![alt text][flow2] + * Output: The next action needs to be a physical and visible action. 
+ * Next actions (probably several – more on that later) + * Anthing that needs to be done, soon as possible + * Waiting for + * Projects + * Actions that requires than one action with next possible action + * Research Machine Learning, Do XYZ Course + * Calendar + * Things you have to do on a certain date or at a certain time + * Only items that really are time and date sensitive on your calendar it will be more useful + * Anything doesn’t need to be done at a certain time will be on your next actions + * Someday/maybe + * Don’t lose your million-dollar idea + * Innovative thought project, passion + * Tasks goes here are thtat you don’t want to “pollute” in your next actions + * you might want to realize at some time in the future. +* Contexts + * "Tags” you put on the items on your next actions + * What equipment you need to perform it + * Could be location- @home, @computer, @office, @city or @store + * Agenda contexts + +# Weekly review + * “critical factor for success” + * Without this, we may complete the next action of a project and forget to add a new “next action” for that project. + * Forget to remove next actions + * Make sure each project has at least one next action. + * Ensure that each action on your next actions list is actually something you want or need to do if you have the time during the coming week. + * If not priority, move some of the next-action to some day/may be + * Review your some day list + * Are there any new project on your head, add to project list + +# Trigger list + * Trigger list is simply a list of key words to “trigger” your brain to remember any open loops you still haven’t captured in your system. + * Anything about Son, Studies, Project, Loan + * [Sample trigger list](https://lifehacker.com/5611657/use-a-weekly-review-list-to-stay-a-step-ahead-this-semester) + * Tickler file - 43 folders + * + + +[flow]: img/gtd_flow.png "GTD Flow" +[flow2]: https://hamberg.no/gtd/images/workflow.svg \ No newline at end of file diff --git a/src/main/md/Productivity/ParkinsonLaw.md b/src/main/md/Productivity/ParkinsonLaw.md new file mode 100644 index 00000000..6c8ba848 --- /dev/null +++ b/src/main/md/Productivity/ParkinsonLaw.md @@ -0,0 +1,27 @@ +* It is a commonplace observation that work expands so as to fill the time available for its completion. +* Example: Elderly women who has not work, trying to write a letter to her niece takes whole day. +* Work behaves like a gas: It fills all the available space (time) and the smaller the space the more pressure you get. +* Bureaucratic burden increases over time +* When you have a deadline it’s like a storm ahead of you or having a truck around the corner. It’s menacing and it’s approaching, so you focus heavily on the task – Eldar Shafir +* “When people sit down to do a task, they’ll put in a lot of effort initially. At some point there’s going to be diminishing returns on extra effort. To optimise productivity, you need to maximise benefits and minimise costs and find that inflection point, which is where you should start to wrap up.” + +## Bike-shed + +* A reactor is so vastly expensive and complicated that an average person cannot understand it, so one assumes that those who work on it understand it. On the other hand, everyone can visualize a cheap, simple bicycle shed, so planning one can result in endless discussions because everyone involved wants to add a touch and show personal contribution. 
+* This is a metaphor indicating that it is not necessary to argue about every little feature based simply on the knowledge to do so. Some people have commented that the amount of noise generated by a change is inversely proportional to the complexity of the change. + +* "the perfect advertisement would attract only one reply". Examples are given of job ads that hope to prove sufficiently off putting to the wrong candidates. + +## The law of multiplication of subordinates + +* The tendency of managers to hire two or more subordinates to report to them so that neither is in direct competition with the manager themself; +* The fact that bureaucrats create work for other bureaucrats. +* Among goverments, Cabinet size was negatively correlated with government effectiveness; political stability. + + + +## + +* [Law of triviality](https://en.wikipedia.org/wiki/Law_of_triviality) +* [A British historian famously wrote that work expands to fill available time – but what was he actually saying about inefficiency?](https://www.bbc.com/worklife/article/20191107-the-law-that-explains-why-you-cant-get-anything-done) +* [Further steps beyond Parkinson's Law: A replication and extension of the excess time effect](https://www.sciencedirect.com/science/article/pii/0022103167900297) \ No newline at end of file diff --git a/src/main/md/Productivity/Productivity_Tips.md b/src/main/md/Productivity/Productivity_Tips.md new file mode 100644 index 00000000..f87f331a --- /dev/null +++ b/src/main/md/Productivity/Productivity_Tips.md @@ -0,0 +1,6 @@ +* Always start day with TODO + * There will be plenty of disturbances and urgent issues. But Productivity can be increased using TODO +* Tiny little tools can make huge impact + * Don't let browser close the session, use tools that could minimize and keep session active + * Buy proper gears (proper keyboard + proper mouse + wifi network) + * Browswer that keep multiple concurrent session is extreemly useful in multi-tasking \ No newline at end of file diff --git a/src/main/md/Productivity/TypicalDay.md b/src/main/md/Productivity/TypicalDay.md new file mode 100644 index 00000000..44b231fd --- /dev/null +++ b/src/main/md/Productivity/TypicalDay.md @@ -0,0 +1,21 @@ +## Personal workflow +* Always follow GTD + + +## Office +* At the end of the day, log the stauts to nobody + * What is the lesson learnt? 
+ +## Office +* Check all the mails + * Check inbox + * Check other folders + * Have you checked all the folders +* Check calendar + * Remove unwanted invites + * Do you need to send any invite + * Register/Remember/reollect any upcoming important meeting after working hours +* List the 3 important tasks for the day + * Does it align with important 3 goals of week/month +* Before leaving + * Update status to no-body diff --git a/src/main/md/Productivity/img/gtd_flow.png b/src/main/md/Productivity/img/gtd_flow.png new file mode 100644 index 00000000..2f2933c8 Binary files /dev/null and b/src/main/md/Productivity/img/gtd_flow.png differ diff --git a/src/main/md/Python/Python.md b/src/main/md/Python/Python.md new file mode 100644 index 00000000..912ac656 --- /dev/null +++ b/src/main/md/Python/Python.md @@ -0,0 +1,23 @@ +* [Guido van Rossum](http://neopythonic.blogspot.com/) +* [Transforming Code into Beautiful, Idiomatic Python](https://gist.github.com/0x4D31/f0b633548d8e0cfb66ee3bea6a0deff9) + +## Python Best Book +* Fluent Python + +## Learning Python +* [Effective Python by Brett Slatkin](https://learning.oreilly.com/videos/effective-python/9780134175249) +* [Modern Python LiveLessons: Big Ideas and Little Code in Python](https://learning.oreilly.com/videos/modern-python-livelessons/9780134743400) +* [Fluent Python](https://learning.oreilly.com/oriole/fluent-python) + +##Python Influential People + +* David Beazley + * Python Cookbook + * [Keynote PyCon India 2019](https://www.youtube.com/watch?v=VUT386_GKI8) +* Luciano Ramalho + * Fluent Python +* Raymond Hettinger + * Transforming Code into Beautiful, Idiomatic Python +* Alex Martelli +* Steve Holden + * Steve Holden (Keynote): Python: Scalable from Microcontroller to Supercomputer diff --git a/src/main/md/Python/PythonPackages.md b/src/main/md/Python/PythonPackages.md new file mode 100644 index 00000000..e16f3fc8 --- /dev/null +++ b/src/main/md/Python/PythonPackages.md @@ -0,0 +1 @@ +* [fuzzywuzzy](https://github.com/seatgeek/fuzzywuzzy) diff --git a/src/main/md/Python/typed_python.md b/src/main/md/Python/typed_python.md new file mode 100644 index 00000000..a1234222 --- /dev/null +++ b/src/main/md/Python/typed_python.md @@ -0,0 +1,10 @@ + +* [Type Hints by Guido van Rossum (March 2015)](https://www.youtube.com/watch?v=YFexUDjHO6w) +* [Guido van Rossum - Type Hints for Python 3.5](https://www.youtube.com/watch?v=Yqnrfa5ri7E) +* [Stanford Seminar - Optional Static Typing for Python](https://www.youtube.com/watch?v=GiZKuyLKvAA) + +* [A Static Type Inference for Python](http://scg.unibe.ch/download/dyla/2011/dyla11_submission_3.pdf) +* [Gradual Typing for Python 3](https://pycon.org.il/2016/static/sessions/eli-gur-typehints.pdf) +* [From Optional to Gradual Typing via Transient Checks](http://homes.sice.indiana.edu/mvitouse/papers/stop16.pdf) +* [mypy – Python’s Gradual - Typing Implementation](https://pycon.org.il/wwwpyconIL/sites/default/files/inline-files/itzik-kasovitch.pdf) +* [mypy documentation](https://buildmedia.readthedocs.org/media/pdf/mypy/latest/mypy.pdf) \ No newline at end of file diff --git a/src/main/md/RLang/R.lang.md b/src/main/md/RLang/R.lang.md new file mode 100644 index 00000000..66856c78 --- /dev/null +++ b/src/main/md/RLang/R.lang.md @@ -0,0 +1,143 @@ +* To print all global variable + * str(as.list(.GlobalEnv)) +* summary(variable) +* Combining Plots - R makes it easy to combine multiple plots into one overall graph, using either the par() or layout() function. 
+ +```R +str(as.list(.GlobalEnv)) +tbl = table(data$Cheat) +100*tbl/sum(tbl) # Percentage of table numbers +pie(tbl) +cor(a$longevity,a$gestation) +summary(dataframe) +``` + +``` +## Filter +flicker[flicker$color == "brown",] + +## filter and project only one column +flicker[flicker$color == "brown",]$cff +``` + +``` +a <- load("C:/Users/nikia/OneDrive/Math/Statistics/nightlight.RData") +head(nightlight) +tail(nightlight) +dnow <- data.frame(x=rnorm(100), y=runif(100)) +dnow[1:4,] +dnow[dnow$x>0.0,] +typeof(dnow) +x <- c(0,1,2,3,4,5,6,7,8,9) +myGlobals <- objects() +typeof(get(myGlobals[2])) +``` + +``` +a <- load("C:/Users/nikia/OneDrive/Math/Statistics/body_image.RData") +plot(body_image$HS_GPA, body_image$GPA, xlab="HS", ylab="Col") +cor(body_image$HS_GPA, body_image$GPA,use="complete.obs") +fem_summary <- summary(body_image[body_image$Gender=="Female",]$WtFeel) +m_summary <- summary(body_image[body_image$Gender=="Male",]$WtFeel) +obj <- 11:15 +names(obj) <- c("Num1", "Sum1", "Lum1", "Dum1", "Rum1") +values <- unname(obj) +f_pct <- round(fem_summary/sum(fem_summary)*100) +m_pct <- round(m_summary/sum(m_summary)*100) +lbls <- names(fem_summary) +lbls <- paste(lbls, f_pct) +lbls <- paste(lbls, "%",sep="") +pie(fem_summary,labels = lbls) +lbls <- names(m_summary) +lbls <- paste(lbls, m_pct) +lbls <- paste(lbls, "%",sep="") +pie(m_summary, labels=lbls) + +Filter(function(x) x > 3.291, body_image[1:10,]$GPA) +``` + + +``` +L=lm(data$GPA~data$HS_GPA); +abline(L); +cf=coefficients(L); +lt=paste("GPA = ",round(cf[1],2),"+",round(cf[2],2),"HS_GPA") +legend(1.7,4.3,lt) +plot(data$HS_GPA,data$GPA) +abline(L); +``` + + +``` +tbl = table(data.frame(data$Gender,data$WtFeel)); tbl +100*tbl/rowSums(tbl) # to view them in percent +plot(factor(data$Seat), data$GPA)) # side by side comparision +tapply(data$Seat, factor(data$GPA), summary) +``` + +> tapply(data$GPA, data$Seat, summary) +$B + Min. 1st Qu. Median Mean 3rd Qu. Max. NA's + 2.000 2.678 3.000 2.974 3.237 4.060 1 + +$F + Min. 1st Qu. Median Mean 3rd Qu. Max. NA's + 1.920 3.000 3.330 3.251 3.700 4.100 1 + +$M + Min. 1st Qu. Median Mean 3rd Qu. Max. NA's + 1.910 2.780 3.000 3.119 3.505 4.380 6 + + +## R-libraries for plotting + 1. ggplot2 + 2. plotly + 3. Rmarkdown + 4. Flexdashboard + 5. Knitr + 6. Reshape2 + 7. Elastic + 8. Httr + 9. Plyr + 10. Dplyr + 11. Scales + 12. Lubridate + 13. Plotly graphs: https://plot.ly + +```R +pct_table = 100*prop.table(table(random_sample$Handed)); +barplot(rbind(pop_percent,random_sample_percent), beside=T, col=c(0,1),legend.text=T,xlab="Handedness",ylab="Percent in Group",args.legend=list(x="topleft")) +random_sample_percent = 100*summary(random_sample$Handed)/length(random_sample$Handed); + +par(mfrow=c(1,2)); #1 row, 2 columns +``` + +```R +group = sample(1:3,450,replace=TRUE) # choose 1/2/3 450 samples +df = cbind(df, group) +``` + +```R +# Two way table for existing dataframe +tbl = table(data.frame(data$Treat,data$Outcome)) +tbl/rowSums(tbl) * 100 +``` + +data.Treat 0 1 + 0 37.83784 62.16216 + 1 71.05263 28.94737 + 2 32.35294 67.64706 +``` +> tapply(data$Time, factor(data$Treat), summary) +$`0` + Min. 1st Qu. Median Mean 3rd Qu. Max. + 0.286 5.286 22.000 37.726 67.000 165.000 + +$`1` + Min. 1st Qu. Median Mean 3rd Qu. Max. + 0.571 23.000 70.714 63.064 101.500 206.000 + +$`2` + Min. 1st Qu. Median Mean 3rd Qu. Max. 
+ 0.000 5.393 17.786 37.584 61.714 131.000 +``` diff --git a/src/main/md/TechnicalAnalysis/Investment_Books.md b/src/main/md/TechnicalAnalysis/Investment_Books.md new file mode 100644 index 00000000..16e0c55c --- /dev/null +++ b/src/main/md/TechnicalAnalysis/Investment_Books.md @@ -0,0 +1,5 @@ +* The Boglehead’s Guide to Investing by Taylor Larimore - It's based on the investing principles of John C. Bogle, the founder & CEO of Vanguard. +* A Random Walk Down Wall Street by Burton Malkiel +* One Up on Wall Street by Peter Lynch +* The Little Book of Common Sense Investing by Bogle +* The Intelligent Investor by Benjamin Graham \ No newline at end of file diff --git a/src/main/md/TechnicalAnalysis/icici_analysis.md b/src/main/md/TechnicalAnalysis/icici_analysis.md new file mode 100644 index 00000000..d3671f80 --- /dev/null +++ b/src/main/md/TechnicalAnalysis/icici_analysis.md @@ -0,0 +1,19 @@ +# "mailimages" site:icicidirect.com filetype:pdf(https://www.google.com.sg/search?q=mailimages++site:icicidirect.com+filetype:pdf&num=50&tbs=qdr:y,sbd:1) + +* [Derivatives](https://www.icicidirect.com/mailimages/Derivatives_view.pdf) +* [Support & Resistance](https://www.icicidirect.com/mailimages/Technical_Support_and_Resistance.pdf) +* [Monthly technical](https://www.icicidirect.com/mailimages/MonthlyTechnical.pdf) +* [Derivatives Monthly Outlook](https://www.icicidirect.com/mailimages/ICICIdirect_MonthlyTrend.pdf) +* [Mutual Fund Review](https://www.icicidirect.com/mailimages/ICICIdirect_MonthlyMFReport.pdf) +* [Momentum Picks](https://www.icicidirect.com/mailimages/Momentum_Picks.pdf) +* [Research Scorecard](http://content.icicidirect.com/mailimages/scoreboard.pdf) +* [Daily Opening Bell](http://content.icicidirect.com/mailimages/openingbell.pdf) +* [Monthly Corporate Action Tracker](http://content.icicidirect.com/mailimages/IDirect_CorporateActionTracker_May18.pdf) +* [Research Product Basket](http://content.icicidirect.com/mailimages/ResearchProduct.pdf) +* [Daily Currency outlook](http://content.icicidirect.com/mailimages/Currency_outlook.pdf) + +* [ICICI Money manager](http://content.icicidirect.com/idirectcontent/MoneyManagerMagazine/Monthly_Issue.pdf) +* Google: IPO Review site:icicidirect.com filetype:pdf +* Google: -mailimages site:icicidirect.com filetype:pdf + + diff --git a/src/main/md/TechnicalAnalysis/technical_analysis.md b/src/main/md/TechnicalAnalysis/technical_analysis.md new file mode 100644 index 00000000..d536a7a3 --- /dev/null +++ b/src/main/md/TechnicalAnalysis/technical_analysis.md @@ -0,0 +1,47 @@ +* Technical analysis + * funtion ( price_*history*, volume, open interest) => pattern +* Rational analysis or fusion analysis = TA + FA +* Investor and newsletter polls, and magazine cover sentiment indicators, are also used by technical analysts + * How to find sentiment analysis of large fund managers? +* Empirical evidence + * Nonlinear prediction using neural networks occasionally produces statistically significant prediction results. 
+ * A Federal Reserve working paper[19] regarding support and resistance levels in short-term foreign exchange rates "offers strong evidence that the levels help to predict intraday trend interruptions +* Technical trading strategies were found to be effective in the Chinese marketplace + * Contrarian version of the moving-average crossover rule + * The channel breakout rule + * Bollinger band trading rule +* Head-and-shoulders or double-bottoms were considered very useful +* Techniques such as Drummond Geometry attempt to overcome the past data bias by projecting support and resistance levels from differing time frames into the near-term future and combining that with reversion to the mean techniques +* Thomas DeMark's indicators enjoy a remarkable endorsement in the financial industry. + * DeMark indicators - Sequential + * DeMark indicators - Combo + * DeMark indicators - Setup Trend + * The Sequential indicator alone showed statistically significant price moves on a wide range of commodities +* Recent research suggests that combining various trading signals into a Combined Signal Approach may be able to increase profitability and reduce dependence on any single rule. + +# MACD +* [Technical Analysis Indicator MACD](https://www.youtube.com/watch?v=OR8vwFv-5iU) +* Centered oscillator - Oscillator above zero line is bullish, below zero line is bearish mode +* If you compare MA with MACD, MACD would have given signals much sooner + +# EFH +* The efficient-market hypothesis (EMH) states price inclusive of all the information +* As per EMH contradicts the basic tenets of technical analysis by stating that past prices cannot be used to profitably predict future prices. + +# Rational thoughts (with biasness for pattern from technician) +* The emotions in the market may be irrational, but they exist. Because investor behavior repeats itself so often, +* Technicians believe that recognizable (and predictable) price patterns will develop on a chart. +* Recognition of these patterns can allow the technician to select trades that have a higher probability of success + +* As ANNs are essentially non-linear statistical models, their accuracy and prediction capabilities can be both mathematically and empirically tested. 
In various studies, authors have claimed that neural networks used for generating trading signals given various technical and fundamental inputs have significantly outperformed buy-hold strategies as well as traditional linear technical analysis methods when combined with rule-based expert systems +* + +# Reference +* https://cmtassociation.org/chartered-market-technician/cmt-level-1/ +* https://en.wikipedia.org/wiki/Chartered_Market_Technician +* https://www.amazon.com/CMT-Level-2018-Introduction-Technical/dp/1119474531 +* [The Handbook of Technical Analysis + Test Bank: The Practitioner's Comprehensive Guide to Technical Analysis (Wiley Trading) 1st Edition](https://www.amazon.com/Handbook-Technical-Analysis-Test-Bank) +* [Technical Analysis](https://en.wikipedia.org/wiki/Technical_analysis) +* [DeMark Indicators by Jason Perl](http://www.mysmu.edu/faculty/christophert/QF206/Week_05.pdf) +* https://en.wikipedia.org/wiki/Drummond_geometry + * [Drummond Geometry by Ted Hearne](https://www.youtube.com/watch?v=mf_6plh6agw) \ No newline at end of file diff --git a/src/main/md/Tools/Drools.md b/src/main/md/Tools/Drools.md new file mode 100644 index 00000000..c90c2b3b --- /dev/null +++ b/src/main/md/Tools/Drools.md @@ -0,0 +1,143 @@ +# When to use rules engine +* Business logic which you think is getting cluttered with multiple if conditions due to complexity +* Business logic requires special treatment like version control, development, debugging, sme-managed, decision-table-driven +* Domain business logic complexity would be growing exponentially +* Business logic would be keep changing (seasonl, competitor driven, market-driven) +* Deleate business rools using single line code 'ruleEngine.applyRules(product);' +* Corporate decidedto put a rule admin console in the hands of non-technical people. + +# Drools +* Latest version = 7.17.0 as on 20-Feb-2019 +* no-loop: avoid the re-activation of a rule caused by the RHS of that SAME rule. +* lock-on-active: avoid the re-activation of a rule NO MATTER what the cause is. + + +# DSL - DRL +## DSL (Or dictionary) +* The Dictionary file (or DSL) is a text file (with a .dsl extension) that contains DSL entries. +* Dictionary file that defines the translation of business concepts and DRL is simply called DSL in Drools +* DSL used only during compile time to create rules. +* DSL Contains the transformation +* Replaced everything using regex, focus on what is left after '=' + + +## DSLR - DSL Rules +* File containing rules using business concepts is called DSLR +* SME is in charge of the creation and maintenance of the business rules (DSLR). 
+ +```DSL +# Simple DSL example file +[keyword]avoid looping=no-loop true +[when]There is a Customer=$c:Customer() +[when]- with age between {low:\d*} and {high:\d*}=age >= {low}, age <= {high} +[when]- who is older than {low:\d*}=age > {low} +[when]- without a Category set=category == Customer.Category.NA +[then]Set Customer Category to {category:\w*}=modify($c)\{ setCategory(Customer.Category.{category}) \}; +``` + +```DSLR +rule "Categorize Customers between 22 and 30" +avoid looping +when + There is a Customer + - with age between 22 and 30 + - without a Category set +then + Set Customer Category to BRONZE +end +``` +## Compiled using above two DSL + DSLR = DRL +```DRL +rule "Categorize Customers between 22 and 30" +no-loop true +when + $c: Customer( age >= 22, age <= 30, category == Customer.Category.NA) +then + modify($c){ setCategory(Customer.Category.BRONZE) } +end +``` + +# DSL - Decision Tables + +## Decision Table +* Depends on org.drools:drools-decisiontables +* Rules using same structure could be used with decision table +* Defined in XLS, or CSV + * Only the first worksheet of an XLS file will be scanned for rule definitions. +* Decision tables structure - Strict structure needs to be maintained +* Keywords: + * RuleSet, RuleTable, CONDITION and ACTION + * Sequential, EscapeQuotes + * Variables, Functions + * Queries, Declare +* RuleSet + * Row indicates the begining of the rules + * Column indicates other subsequence keyword column + * "RuleSet " default is rule_table + * Attributes in the RuleSet section will affect the entire package where the rules are defined. + * This may include rules defined in other assets outside the decision table where they are defined. +* RuleTable + * NAME + * DESCRIPTION + * CONDITION + * ACTION + * METADATA + +* CONDITION + * Left-hand-side of the rule + * $param - can be used to specify a comma separated values further down in the column + * $1, $2, and so on, can be used to access each individual value + * forall(delimiter){snippet} +* ACTION + * The value of the cell in the next row is optional and, if present, it represents an object reference, a global variable + * When a condition is only composed of a binary operator (such as ==, >, <, and so on), the use of $param is optional. + * Drools will understand that the interpolation value has to be placed at the end of the condition. +* Debugging decision table + * Pay attention the log of the compilation + * Message [id=3, level=ERROR, path=chapter07/dtable-simple/customer-classification-simple.xls, line=24, column=0 text=[ERR 102] Line 24:29 mismatched input '=' in rule "Simple Customer Categorization_13"], + * Dump the DRL compiled into console/file for debugging + * + ```java + import org.drools.decisiontable.DecisionTableProviderImpl; + InputStream dtableIS = //get the input stream to the decision table file + DecisionTableProviderImpl dtp = new DecisionTableProviderImpl(); + String drl = dtp.loadFromInputStream(dtableIS, null); + ``` + * The line and column numbers in the error messages can be traced to the DRL generated by the DecisionTableProviderImpl class. +* Decision tables are not ideal for every situation: one of the biggest limitations is that rules we can model using decision tables must have the same structure. 
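+## Running the rules
+* Whatever the authoring format (DRL, DSL + DSLR, or a decision table), the compiled rules are executed the same way: insert facts into a session and fire the rules. Below is a minimal sketch assuming the standard KIE API (`KieServices`, `KieContainer`, `KieSession`); the session name "ksession-rules" and the `Customer` stand-in class are illustrative only and mirror the earlier examples.
+
+```java
+import org.kie.api.KieServices;
+import org.kie.api.runtime.KieContainer;
+import org.kie.api.runtime.KieSession;
+
+public class RuleRunner {
+
+    // Minimal stand-in for the Customer fact used in the DRL examples above
+    public static class Customer {
+        private int age;
+        private String category = "NA";
+        public int getAge() { return age; }
+        public void setAge(int age) { this.age = age; }
+        public String getCategory() { return category; }
+        public void setCategory(String category) { this.category = category; }
+    }
+
+    public static void main(String[] args) {
+        // Build a container from the rule resources (DRL/DSLR/XLS) on the classpath
+        KieServices ks = KieServices.Factory.get();
+        KieContainer container = ks.getKieClasspathContainer();
+
+        // "ksession-rules" is a hypothetical session name declared in kmodule.xml
+        KieSession session = container.newKieSession("ksession-rules");
+        try {
+            Customer customer = new Customer();
+            customer.setAge(25);
+            session.insert(customer);             // add the fact to working memory
+            int fired = session.fireAllRules();   // match LHS patterns, run RHS actions
+            System.out.println(fired + " rule(s) fired, category = " + customer.getCategory());
+        } finally {
+            session.dispose();                    // always release the session
+        }
+    }
+}
+```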
+ +## Drolls language LHS +```DRL + /* find map */ + Student($markMap : markMaap) + eval($markMap["Maths"] > 90) + /* find map and check condition */ + $student: Student($marks : markMap, $marks.get("Maths") > 90) + /* find map and check key presence and check condition */ + $student: Student($marks : markMap, $marks.containsKey("Maths"), $marks.get("Maths") > 90) + Cheese( type == "stilton" && price < 10, age == "mature" ) + /* alernative way to access map */ + $account : Account( hm[ "A" ] == "B" ) + /* Access map inside List */ + $map : Map(this["issue"] != "") from $rAlerts.reservationMap +``` +### Handle list of hasmap +```#DRL +rule "Validate test" +when + SomeClass($tMap : outHash) + e: Map.Entry(k:key, v:value) from $tMap.entrySet() + DateSet( tt: trainingType == "NEW" ) from v +then + System.out.println(e.getKey() + "-" + tt); +end +``` + + + +## Reference +* [When to use rules engine](https://stackoverflow.com/questions/2167358/pros-and-cons-of-java-rules-engines) +* [drools-dsl](https://github.com/integrallis/drools-dsl/tree/master/src/main/resources/rules) +* [DSLR](https://training-course-material.com/training/Filip_Drools_-_DSL) +* [Decision Tables](https://docs.jboss.org/drools/release/6.5.0.Final/drools-docs/html/ch06.html#d0e5713) + diff --git a/src/main/md/Tools/Graph.md b/src/main/md/Tools/Graph.md new file mode 100644 index 00000000..3b52d266 --- /dev/null +++ b/src/main/md/Tools/Graph.md @@ -0,0 +1,11 @@ +* PageRank algorithms +* THE PREGEL PROCESSING MODEL + * Idea behind Pregel + * One vertex can send a message to another vertex + * Typically those messages are sent along the edges in a graph + * In each iteration, a function is called for each vertex, passing it all the messages that were sent to it + * a vertex remembers its state in memory from one iteration to the next + * The function only needs to process new incoming messages. If no messages are being sent in some part of the graph, no work needs to be done + * Pregel is similar to actor model +* GraphChi - Graph processing framework +* FlumeJava \ No newline at end of file diff --git a/src/main/md/Tools/JHipster.md b/src/main/md/Tools/JHipster.md new file mode 100644 index 00000000..d7eb489c --- /dev/null +++ b/src/main/md/Tools/JHipster.md @@ -0,0 +1,174 @@ +# What are microservices? +* Do one thing well! +* The phrase “Micro-Web-Services” was first used at a cloud computing conference by Dr. Peter Rodgers in 2005 +* It is not new, May-2011 - first time this term was used +* Composing application like unix like business services +* Fine grained web service @ webscale by Adrian Cockcroft +* Martin Fowler and James Lewis - Micro Service article + + +# Spring Boot +* Introduced in 2014, and removed configuration +* Build executable jar instead of war (directly execute) +* Spring Boot 2 - Reactive based +* [Spring Initializr](start.spring.io) +* @RepositoryRestResource - Can expose DAO on Rest end points +* Feign - Declarative REST clients with spring-cloud-netflix Feign +* Zuul - Intelligent and programmable routing with spring-cloud-netflix Zuul +* Hystrix - Circuit breaker with spring-cloud-netflix Hystrix + +## Security +* OAuth 2.0 and OIDC +* Having a secret in SPA is big anti-pattern +* Find provider like Keycloak or Okta +* Spring security + * Handles security redirect + * Intercepts + * Sets security session +* OAuth + * Delegated authentication system using 3rd party provider. + * Authenticate using 3rd party, but not authentication prototocol. 
+ * Obtain ID Token and/or access token + * No way to get user information in OAuth +* Open Id Connect : Facebook, LinkedIn, Google or Microsoft + * Built on top of OAuth + * JWT - is just a string token (pronounced as JAAT) Json web token + * User-end-point is returned, we can find more information about the user + +# JHipster documentations +* [Jhipster Presentation] (https://www.jhipster.tech/presentation/#/) + + + +# JHipster +* Goals + * High performance, Robust Java service side Spring Boot application + * Mobile first front using React/Angular and BootStrap + * Microservice using Netflix-OSS, ELK and Docker + * Powerful workflow build using NPM, Webpack, Yeoman and Maven/Gradle +* UAA - User authentication and authorization server // JHipster can generate +* Always generate Gatway server + * Everything related to UI sits on the gateway + * Uses zull and hystrix + * Handles/Proxies all backend microservices +* JHipster 5 = JHipster + React + Spring Boot 2 +* KeyCloak - Open Source Identity and Access Management +* Subgenerator is a upgrade tool for JHipster +* .yo-rc.json - Contains all the selection made, This file could be used to upgrade/generate using newer JHipster +* JHispter mini-book is written using ASCIDoctor +```bash +npm install -g yo generator-jhipster or yarn global add generator-jhipster +yarn global add e2e +yo jhipster +npm start +./mvnw spring-boot:run +./mvn spring-boot:run -Dspring-boot.run.arguments=--logging.level.org.springframework=TRACE,--logging.level.com.bank.module=TRACE +./gradlew bootRun #alternative to mvn +jhipster entity Foo User +yarn e2e #Test end-2-end +yo jhipster:entity +jhipster entity #(alternative to above syntax) +``` +* Architecture: ![Micoservices Architecture][Arch] +* To convert existing JHipster application into PWA (Progressive web app) + * Read reade.md and search for pwa/progressive in gateway application + * gateway-app/src/main/webapp/index.html - Uncomment service worker './sw.js' + * Uncomment in webpack-common.js and ensure sw.js is copied + * There should be service-worker in root directory ensure it is there (there was a bug in JHipster) + * + + +## JDL +* JHipster domain language +* [jdl-samples](https://github.com/jhipster/jdl-samples/blob/master/blog.jh) +* [JDL studio](https://start.jhipster.tech/jdl-studio/) +``` +* Follow the steps to create server and client side code +* Create JDL +* jhipster import-jdl store.jdl +* jhipster entity product #command to add new entity to an existing jhipster project +* #use the widget to add entity to an existing project +``` + +## JHipster Registry +* JHipster registry is an Eureka server +* It is SPring Cloud Config Server +* Dashboard for monitor and manage applications + + +## Tutorial for eureka-server +* Add spring-boot:run in pom.xml, as part of for default goal +* Alternatively using ./mvnw spring-boot:run if you don't have default goal +* Access Eureka service via http://localhost:8761/ + +## Add "beer-catalog-service" +* Generate project using following + * Actuator for monitoring + * JPA, H2 + * Rest repostitories + * Devtools + * Lombok - Java annotation library which helps to reduce boilerplate code and code faster + +## Progressive Web App +* Mobile first web development +* Answer to slow internet connection, and web-app has to load in less than 3 seconds +* Firefox supports using service worker, service worker can intercept network call, and could provide offfline content +* Google trying to reduce number of mobile-apps +* Use PRPL pattern + * Push critical resources for 
the initial URL route + * Render iniital route + * Pre-cache remaining route + * Lazl-load and create reamining routes on demand + +## Jhipster Docker Container +```bash +jhipster docker-compose +#It might ask other sub images to be build first +#JHipster docker compose sub-generator +./mvnw verify -Pprod dockerfile:build /app/blog/AppDocker +./mvnw verify -Pprod dockerfile:build /app/blog/gatewayDocker +./mvnw verify -Pprod dockerfile:build /app/blog/storeDocker +``` +# To start the JHipster docker app +```bash +# we can use kinematic to view docker images during development time +docker-compose up +``` +## Jhipster cloud deployment +```bash +# To generate for Heroku for monoliths +yo jhipster:heroku +# To generate for CloudFoundry only for postgresql +yo jhipster:cloudfoundry +# On AWS - https://www.jhipster.tech/aws/ +jhipster aws-containers --skip-checks -d +jhipster aws +# On GCP - Google Cloud@https://github.com/oktadeveloper/jhipster-microservices-example +jhispter kubernetes +# https://www.youtube.com/watch?v=dgVQOYEwleA&feature=youtu.be + +``` + + +# Reference +* [Julien Dubois](https://www.julien-dubois.com/jhipster.html) + * https://twitter.com/juliendubois +* https://start.jhipster.tech/ +* [Microservices](https://martinfowler.com/articles/microservices.html) +* (https://www.jhipster.tech/microservices-architecture/) +* Play by Play: Developing Micorservices Matt Raible +* [Get Started with JHipster 4](http://www.eclipse.org/community/eclipse_newsletter/2017/january/article3.php) +* [The JHipster Mini-Book 4.5](https://www.infoq.com/minibooks/jhipster-4x-mini-book) +* [Microservices-for-the-masses-with-spring-boot-angular-and-jhipster-codeone-2018](https://speakerdeck.com/mraible/microservices-for-the-masses-with-spring-boot-angular-and-jhipster-codeone-2018) +* [spring-boot-microservices-example](https://github.com/oktadeveloper/spring-boot-microservices-example) +* [Security with spring-boot-microservices-example - Okta Oauth branch](oktaoath@https://github.com/oktadeveloper/spring-boot-microservices-example) +* [Build a Microservices Architecture for Microbrews with Spring Boot](https://developer.okta.com/blog/2017/06/15/build-microservices-architecture-spring-boot) +* (On GCP)[https://www.youtube.com/watch?v=dgVQOYEwleA&feature=youtu.be] +* spring.io/guides +* https://github.com/mraible/jhipster5-demo +* https://asciidoctor.org/ +* https://scotch.io/tutorials/the-ultimate-guide-to-progressive-web-applications + +[Arch]: ../img/microservices_architecture_2.png "JHipster micoservices architecture" + + diff --git a/src/main/md/Tools/Lucene.md b/src/main/md/Tools/Lucene.md new file mode 100644 index 00000000..874759b2 --- /dev/null +++ b/src/main/md/Tools/Lucene.md @@ -0,0 +1,61 @@ +* Lucene - inverted index + * Index - Page to words (Table of content) + * Inverted Index - Words to page (Back of the book index) + * Indexing involves adding Documents to an IndexWriter + * Searching involves retrieving Documents from an index via an IndexSearcher +* Lucene has Document, IndexWriter, IndexSearcher + * Document has Fields + * IndexWriter has IndexWriterConfig and Directory (index) + * IndexWriterConfig has Analyzer + * IndexSearcher has IndexReader + +* Analyzers + * used during ingestion, when a document is indexed, and at query time. + * An analyzer examines the text of fields and generates a token stream. + * Analyzers may be a single class or they may be composed of a series of tokenizer and filter classes. +* Tokenizers break field data into lexical units, or tokens. 
+* Filters : Filters examine a stream of tokens and keep them, transform or discard them, or create new ones. + * Tokenizers and filters may be combined to form pipelines, or chains, where the output of one is input to the next. + * Sequence of tokenizers and filters is called an analyzer and the resulting output of an analyzer is used to match query results or build indices. + * org.apache.lucene.analysis.core.StopFilter + +Filters examine a stream of tokens and keep them, transform or discard them, or create new ones. Tokenizers and filters may be combined to form pipelines, or chains, where the output of one is input to the next. Such a sequence of tokenizers and filters is called an analyzer and the resulting output of an analyzer is used to match query results or build indices. + +* Every record in Lucene is considered as document, and document will have one or more fields + ```Java + Document doc = new Document(); + doc.add(new TextField("title", "Lucene in Action", Field.Store.YES)); + doc.add(new StringField("isbn", "193398817", Field.Store.YES)); + ``` +* Documents are stored in index + ```Java + Directory index = new RAMDirectory() + IndexWriter indexWriter = new IndexWriter(index, new IndexWriterConfig(new StandardAnalyzer())); + indexWriter.addDocument(doc); + ``` +* ```Java + Query q = new QueryParser("title", analyzer).parse(querystr); + IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(index)); + TopDocs docs = searcher.search(q, hitsPerPage); + ScoreDoc[] hits = docs.scoreDocs; + ``` + +* Lucene Query Syntx +```yaml +title:foo #Search for word "foo" in the title field. +title:"foo bar" +title:"foo bar" AND body:"quick fox" +(title:"foo bar" AND body:"quick fox") OR title:fox +title:foo -title:bar +title:foo* #Search for any word that starts with "foo" in the title field. +title:foo*bar #Search for any word that starts with "foo" and ends with bar in the title field. +"foo bar"~4 #Search for "foo bar" within 4 words from each other. +mod_date:[20020101 TO 20030101] #Range Queries allow one to match documents whose field(s) values are between the lower and upper bound specified by the Range Query. 
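+# A few more classic QueryParser forms (assumed defaults; exact behaviour depends on the parser configuration):
+title:foo~ #Fuzzy search: match terms similar to "foo".
+title:te?t #Single-character wildcard.
++title:foo -body:bar #"+" marks a required clause, "-" excludes matching documents.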
+(title:foo OR title:bar)^1.5 (body:foo OR body:bar) #Assigning higher boosts to title matches than to body content matches - Query time boost +``` + +# References +* [Lucene in 5 minutes](http://www.lucenetutorial.com/lucene-in-5-minutes.html) +* [Basic Concepts](http://www.lucenetutorial.com/basic-concepts.html) +* [Query syntax](http://www.lucenetutorial.com/lucene-query-syntax.html) +* [Analyzers, Tokenizers, and Filters](https://lucene.apache.org/solr/guide/6_6/understanding-analyzers-tokenizers-and-filters.html) \ No newline at end of file diff --git a/src/main/md/Tools/Modern_FrontEnd_Web_Development.md b/src/main/md/Tools/Modern_FrontEnd_Web_Development.md new file mode 100644 index 00000000..acf489d3 --- /dev/null +++ b/src/main/md/Tools/Modern_FrontEnd_Web_Development.md @@ -0,0 +1,10 @@ +* https://www.jhipster.tech/tech-stack/ +* HTML5 Boilerplate +* https://www.thymeleaf.org/ + +## Other tools +* Sass +* Google analytics (more than decade and still modern) +* http://humanstxt.org/ +* Modernizr +* Normalize.css diff --git a/src/main/md/Tools/MongoDB.md b/src/main/md/Tools/MongoDB.md new file mode 100644 index 00000000..f3aa8f8a --- /dev/null +++ b/src/main/md/Tools/MongoDB.md @@ -0,0 +1,8 @@ +```SQL + net start "MongoDB Server" + net stop "MongoDB Server" + show dbs; + use libraryApp; + db.books.findOne().pretty() + db.books.find().pretty() +``` \ No newline at end of file diff --git a/src/main/md/Tools/Spring5/SpringFramework.md b/src/main/md/Tools/Spring5/SpringFramework.md new file mode 100644 index 00000000..8b92c1e5 --- /dev/null +++ b/src/main/md/Tools/Spring5/SpringFramework.md @@ -0,0 +1,359 @@ +# Spring-Core Framework + +* Gloryfied hashmap (applicationContext) that maintains toplogy of beans +* + +* Java.lang.Object is surrogated with 3 features + * Dependency Injection + * Aspect Oriented Programming + * Enterprise Service Abstractions + + + +* STS - Is a tool (Eclipse based) + + +## Enterprise Service Abstractions + +* Cacheable annotation can cache the result of method invocation + * @Cacheable("customers") + getCustomerById(long customerId) + * You can plug any cache provider +* Controller + * @Controller - Spring automatically connect methods in the controller bean to an http-endpoint +* Configuration + * Toplogy between beans are specified via config (xml, annotation or JavaConfig) + * Toplogy - how one bean related to other beans + * In xml, every bean has an id and type + * In JavaConfig - Spring assigns id + * All @Bean annotated methods are invoked and keep the bean inside applicationContext (not lazy) +* @PropertySource - can load all environment properties or external configuration files +* By default, Spring in UnOpinionated, hence we have explicitly turn-on the feature using annotations + * Annotation would require other annotations to enable annotations + * @EnableTransactionManger (without this Transaction annotations won't be effective) + * @ComponentScan (without this @Repository, @Controller won't be effective) + + +## Configuration +* ClassPathXmlApplicationContext("service-config.xml") +* FileSystemXmlApplicationContext(new File("/service-config.xml")) + * XML - all configruation at one-place is good, no compile time checking is bad. 
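+* A minimal sketch of bootstrapping the XML flavour listed above (the bean id "customerService" is hypothetical; service-config.xml is the file named in the bullet):
+
+```java
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.support.ClassPathXmlApplicationContext;
+
+public class XmlBootstrap {
+    public static void main(String[] args) {
+        // Loads bean definitions from service-config.xml on the classpath
+        ApplicationContext ctx = new ClassPathXmlApplicationContext("service-config.xml");
+        Object service = ctx.getBean("customerService");   // hypothetical bean id
+        System.out.println("wired: " + service);
+    }
+}
+```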
+* @Configuration + * Compile time checking, but not a single location to view the config, a bit of boiler-plate code for simple beans +* AnnotationConfigApplicationContext(ServiceConfigurationPackage.class) + * AnnotationConfigApplicationContext(ServiceConfigurationPackage.class.getPackage().getName()) + * Nice compromize between xml and @Configuration, Reduces boiler plate @Bean declarations + * Auto detects beans from package using @ComponentScan + +## Injections (3 ways to do) +* @Autowired + * Classic spring annotaion for injection +* @Inject + * Java.Inject +* @Resource + * JSR - 250 based annotation + +## Beans +* Bean lifecycle callbacks +* Bean scopes +* Bean post-processors (BeanFactory methods) +* AOP - Add new features to existing beans using AOP + + +# Bean lifecycle callbacks +* We have three options (interface, SmartLifeCycle interface, annotation) +* Interface like InitializingBean or DisposableBean - Spring-legacy way + * init-method, andd destroy-method + * InitializingBean gives access to afterPropertiesSet() + * DisposableBean gives access to destroy() +* @PostConstruct, @PreDestroy + +# Bean lifecycle callbacks (SmartLifeCycle interface) +* start(<>) +* stop(<>) +* isAutoStartup() +* isRuning() + +# Bean scopes +* Default Singleton +* Prototype +* @Scope("request") - Custom new instance for every http request @Scope("request") +* @Scope("session") +* org.springframework.beans.factory.config.Scope - itself a contract (and bean) within Spring Framework + * ConversationId -- could be sessionId or any custom scope id + * ConversationId - Can be treated like "cacheRegion", it invalidates automatically when conversation ends + * HTTPRequest, Session (scope) + * This is better than Singleton as the scope is narrower +* org.springframework.beans.factory.config.CustomScopeConfigurer (resolves for request or session) +* @org.springframework.context.annotation.Scope("thread") + * ThreadAnnouncer + +## Introduce new behaviour using BeanFactory PostProcessors +* Add/change behaviour of the beans +* Overrides/Viteos the actual behaviour using proxy layer +* Spring itself uses this feature a lot +* It can change before object is constructed, but can't do anything about constrcution process itself +* Possible to add auditing layer using this feature +* Spring MVC binds methods to HTTP_ENDPOINT, this feature was implemented by Beanfactory PostProcessor +* BeanFactoryPostProcessor implementation should be returned via staic method in Configuration class (as it should be loaded before all the objects are created.) 
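+* A minimal sketch of that pattern (the config class name and logging body are illustrative only):
+
+```java
+import org.springframework.beans.factory.config.BeanFactoryPostProcessor;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+@Configuration
+public class PostProcessorConfig {
+
+    // static: instantiated before the regular beans, so it can see every bean definition first
+    @Bean
+    public static BeanFactoryPostProcessor definitionLogger() {
+        return beanFactory -> {
+            // Runs after bean definitions are loaded but before any bean is constructed
+            for (String name : beanFactory.getBeanDefinitionNames()) {
+                System.out.println("bean definition: " + name);
+            }
+        };
+    }
+}
+```
+* Note: a BeanFactoryPostProcessor only sees bean definitions; wrapping live bean instances (as in the @Timed timing example at the end of these notes) is done with a BeanPostProcessor instead.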
+* All BeanFactoryPostProcessor should be static +* BeanFactoryPostProcessor method interceptor better to check for annotation (instead of blanket feature) + * For example, check for @Timed annotation, so we can add time method invocation +* https://github.com/joshlong/a-walking-tour-of-all-of-springdom/blob/master/ioc/src/main/java/com/joshlong/spring/walkingtour/ioc/manybeans/bfpp/SoxComplianceSuite.java + +## Reference +* [Josh Long - Spring Core](https://github.com/joshlong/a-walking-tour-of-all-of-springdom) +* [Proxy method logger example](https://github.com/joshlong/a-walking-tour-of-all-of-springdom/blob/master/ioc/src/main/java/com/joshlong/spring/walkingtour/ioc/manybeans/bpp/MethodTimeLoggingBeanPostProcessor.java) +* [AspectJ Auto Proxy - AOP](https://github.com/joshlong/a-walking-tour-of-all-of-springdom/blob/master/ioc/src/main/java/com/joshlong/spring/walkingtour/ioc/manybeans/aop/MethodTimeLoggingAspect.java) + +## Introduce new behaviour using AOP (not beanfactory) +* No convenient way to introduce behaviour to all the methods in OOP (failure of OOP?) +* PointCut + * Pattern of Objects and Methods + * Globs + * Example: beginTxn* (methods), execute* (methods) +* JointPoint - Current method under execution + +## How to get reference of object managed outside the Spring Container +* FactoryBean + * FactoryBean{ T getObject(); Class getObjectType(); boolean isSingleton() } + * [FactoryBean](https://github.com/spring-projects/spring-framework/blob/master/spring-beans/src/main/java/org/springframework/beans/factory/FactoryBean.java) + + +## TaskExecutorFactoryBean +* [TaskExecutorFactoryBean](https://github.com/spring-projects/spring-framework/blob/master/spring-context/src/main/java/org/springframework/scheduling/config/TaskExecutorFactoryBean.java) + +## SpEL +* Spring Expression Language +* [Spel Config](https://github.com/joshlong/a-walking-tour-of-all-of-springdom/blob/master/ioc/src/main/java/com/joshlong/spring/walkingtour/ioc/strangebeans/spel/Config.java) +* "#{ T(Math).random()}" + +## Spring Bean Profiles +* We may need to use bean with some setting in development and some other settings in production + * Spring Profile is there to rescue + * InMemory during testing/developement, whereas something concrete while on production +* Bean would be available only when profile related to it is active +* We can tag bean with - @Profile("embedded"), @Profile("dev"), @Profile("production") +* We need to pass the profiles.active properties - -Dspring.profiles.active=embedded +* applicationContext.getEnvironment().setActiveProfiles("default") - Api to activate profile +* We can pass multiple value comma separated -Dspring.profiles.active=embedded,dev,uat + + +## Spring Configuration +* We can split configuration and import and aggregate them +* org.springframework.core.env.Environment has all the environment varaibles +* org.springframework.core.env.PropertyResolver + * Works with PropertySources + * PropertSources -> PropertisFiles, JNDI, Env + * We can extend PropertSources to read from rdbms tables + +## Spring Manage threads, Scheduling task +* JEE supports Workmanager API - javax.resource.spi.work.WorkManager +* JSDK doesn't have equivalent to that. 
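+* As the next bullets describe, Spring fills this gap with the TaskExecutor abstraction plus @EnableAsync/@Async and @EnableScheduling/@Scheduled. A minimal annotation-driven sketch (pool sizes and class names are illustrative only):
+
+```java
+import java.util.concurrent.CompletableFuture;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.scheduling.annotation.Async;
+import org.springframework.scheduling.annotation.EnableAsync;
+import org.springframework.scheduling.annotation.EnableScheduling;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
+import org.springframework.stereotype.Service;
+
+@Configuration
+@EnableAsync
+@EnableScheduling
+public class TaskConfig {
+
+    // Executor that @Async methods are dispatched to (resolved by the bean name "taskExecutor")
+    @Bean
+    public ThreadPoolTaskExecutor taskExecutor() {
+        ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
+        executor.setCorePoolSize(4);
+        executor.setMaxPoolSize(8);
+        return executor;
+    }
+}
+
+@Service
+class ReportService {
+
+    // Caller returns immediately; the body runs on the task executor
+    @Async
+    public CompletableFuture<String> generate(long id) {
+        return CompletableFuture.completedFuture("report-" + id);
+    }
+
+    // Fires every 15 seconds regardless of how long the previous run took
+    @Scheduled(fixedRate = 15 * 1000)
+    public void poll() {
+        System.out.println("polling...");
+    }
+}
+```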
+* Spring TaskExecutor - predates Java5 +* It works in any container and/or app-server based on the environment +* Example for Glassfish Spring implements - https://github.com/ndimiduk/spring-framework/blob/master/org.springframework.transaction/src/main/java/org/springframework/jca/work/glassfish/GlassFishWorkManagerTaskExecutor.java +* @EnableAsync - Method can return TaskExecutor + * @Async annotation for any method, would leads to asnchronous call + * @Async method call would be invoked inside runnable, caller will not be blocked +* @EnableScheduling + * @EnableScheduling will search for TaskScheduler inside the configuration + * @ConcurrentTaskScheduler, @ThreadPoolTaskScheduler, @TimerMangerTaskScheduler + * Enables 3 kinds of scheduling + * We can annotate any method with Cron confiugration + * @Schduled(cron="*/10 * * * * *") + * @Schduled(fixedRate=15 * 1000) // Every 15 seconds, doesn't worry about how long it takes + * @Schduled(fixedDelay=20 * 1000) // Every 20 seconds only if prior invocation finished + +## Caching +* To increase performance, cache the prior result or data +* org.springframework.cache.{CacheManager, Cache} +* Backend adapters for EhCache, Gemfire, Coherence, JSR107, Redis +* Spring-data-redis provides redis support +* @EnableCaching works with CacheManger +* @Cacheable (for loaders and getters) and @CacheEvict (for delete and remove methods) + * @Cacheable("customers") + * @Cacheable(value="customers", condition="name.length<10") + * condition="name.length<10" - is SpEL + * customers is regionName or Cache name + * @Cacheable(value="customers", key="id) + * if method has multiple argument, still id would be considered as key ex: loadOwnersByIdAndName() + * @CacheEvict("customers") (generall with delete method) + +## Spring relational data access + +### Relation data access problems + +* Resource acquisition code will be boiler-plate code +* Mapping and converting between relational and java-bean +* There are multiple ways to access data, JDBC, JPA, JDO +* PrimaryKeyViolationException exception manifiest into multiple different exception based on persistence framework being used (JPA, JDO), but we need common exception +* Java has checkedException, but we may need non-checked exception to save rest of the layer +* Transaction might needs to be handled using different way based on transaction manager +* We might swtich from one vendor to another + +### Solution provided by Spring + +* Template Objects +* Peristence exception translation +* Transaction management +* Repositories + * Knows how to access data from underlying data-storage + * A repository is a datasource, it knows how to read from underlying storage, data access functionality +* Object Mapping and Serialization + * JdbcObjectMapper, RowMapper + * Hibernate Provides for JDBC, but spring provides generic one that could be used for any data-source + + +### Transaction Management + +* begin; do n task; commitl iff error => rollbackl end; +* JTA (vendor API) +* JPA EntityManager API +* JDBC Connection and PreparedStatement + +### Transaction Management - PlatformTransactionManger (and TransactionTemplate) + +* Spring generic framework to support transaction +* PlatformTransactionManger{ TransactionStatus getTransaction(@Nullable TransactionDefinition definition); Commit(TransactionStatus status) ; rollback(TransactionStatus status) } +* PlatformTransactionManager + * 
(https://github.com/spring-projects/spring-framework/blob/master/spring-tx/src/main/java/org/springframework/transaction/PlatformTransactionManager.java) + * JpaTransactionManager + * JdoTransactionManager + * JmsTransactionManager + * HibernateTransactionManger + * RabbitTransactionManager + +### Transaction Management - TransactionTemplate + +* TransactionTemplate requires TransactionManager (like jdbcTemplate requires dataSource) +* https://github.com/spring-projects/spring-framework/blob/master/spring-tx/src/main/java/org/springframework/transaction/support/TransactionTemplate.java +* To construct TransactionTemplate, PlatformTransactionManger should be passed as an argument to TransactionTemplate + ```java + transactionTemplate.execute(new TransactionCallbackWithoutResult({ + public void doInTransactionWithoutResult(TransactionStatus status){ + jdbcTemplate.execute(); + } + )); +``` +* TransactionTemplate and TransactionManager - are too low -level, we can manage txns with annotations + +### With Annotation (higher level, interally TransactionTemplate being used) + +* @EnableTransactionManagement requires @Bean of type PlatformTransactionManger +* @Transactional annotation can be used to any method that requires annotation +* @Transactional requires @EnableTransactionManagement + * Simlar to @Cacheable requires @EnableCaching + * This pattern occurs in SpringFramework + + +### Database can be accesed via jdbcTemplate or Command Object (SimpleJdbcInsert) + +* JDBC Command object is not as famous as jdbcTemplate +* Jdbc Command object is quite useful in certain cases, we can generalize few methods like generic inserts +* (https://github.com/joshlong/a-walking-tour-of-all-of-springdom/blob/master/services/src/main/java/com/joshlong/spring/walkingtour/services/jdbc/JdbcCustomerService.java) +* Multiple datasource hidden using spring data access framework + * (https://github.com/joshlong/a-walking-tour-of-all-of-springdom/blob/master/services/src/main/java/com/joshlong/spring/walkingtour/services/CustomerServiceMain.java) + * + + + +---------------------------- +```java +public class MethodTimeLoggingBeanPostProcessor implements BeanPostProcessor { + + + @Override + public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException { + return bean; + } + + @Override + public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException { + ProxyFactory factory = new ProxyFactory(); + factory.addAdvice(new TimeLoggingMethodInterceptor()); + factory.setTarget(bean); + return (Object) factory.getProxy(); + } + + + /** + * logs the method invocation times + */ + private class TimeLoggingMethodInterceptor implements MethodInterceptor { + @Override + public Object invoke(MethodInvocation invocation) throws Throwable { + + Object result = null; + if (invocation.getMethod().getAnnotation(Timed.class) != null) { + long start = System.currentTimeMillis(); + result = invocation.proceed(); + long stop = System.currentTimeMillis(); + System.out.println(invocation.getMethod().getName() + ": " + (stop - start) + "ms"); + } else { + result = invocation.proceed(); + } + return result; + } + } +} +``` +--- +```java +@Configuration +public class ServiceConfig { + + @Bean + public TransferService transferService(AccountRepository accountRepository) { + return new TransferServiceImpl(accountRepository); + } +} + +@Configuration +public class RepositoryConfig { + + @Bean + public AccountRepository accountRepository(DataSource dataSource) { + return new 
JdbcAccountRepository(dataSource); + } +} + +@Configuration +@Import({ServiceConfig.class, RepositoryConfig.class}) +public class SystemTestConfig { + + @Bean + public DataSource dataSource() { + // return new DataSource + } +} + +public static void main(String[] args) { + ApplicationContext ctx = new AnnotationConfigApplicationContext(SystemTestConfig.class); + // everything wires up across configuration classes... + TransferService transferService = ctx.getBean(TransferService.class); + transferService.transfer(100.00, "A123", "C456"); +} +``` + +// Command Object +```java + + public Customer createCustomer(String fn, String ln) { + + Map args = new HashMap(); + args.put("first_name", fn); + args.put("last_name", ln); + + SimpleJdbcInsert simpleJdbcInsert = new SimpleJdbcInsert(this.jdbcTemplate); + simpleJdbcInsert.setTableName("customer"); + simpleJdbcInsert.setColumnNames(new ArrayList(args.keySet())); + simpleJdbcInsert.setGeneratedKeyName("id"); + + Number id = simpleJdbcInsert.executeAndReturnKey(args); // the ID of the inserted record. + Long longId = (Long) id; + BigInteger bigInteger = BigInteger.valueOf(longId); + return getCustomerById(bigInteger); + } + +``` \ No newline at end of file diff --git a/src/main/md/Tools/Spring5/Sprint-boot.md b/src/main/md/Tools/Spring5/Sprint-boot.md new file mode 100644 index 00000000..d8386b05 --- /dev/null +++ b/src/main/md/Tools/Spring5/Sprint-boot.md @@ -0,0 +1,8 @@ +* [Spring JPA selecting specific columns](https://stackoverflow.com/questions/22007341/spring-jpa-selecting-specific-columns/36021243) +* [How to call a service from Main application calls Spring Boot?](https://stackoverflow.com/questions/49698963/how-to-call-a-service-from-main-application-calls-spring-boot) +* [How to Autowire conditionally in spring boot?](https://stackoverflow.com/questions/57656119/how-to-autowire-conditionally-in-spring-boot) +* [How to Manage Multiple Spring Boot in the same project](https://stackoverflow.com/questions/39680990/how-to-manage-multiple-spring-boot-in-the-same-project) +* [How to run spring-boot as a client application?](https://stackoverflow.com/questions/39205430/how-to-run-spring-boot-as-a-client-application/39206323#39206323) +* [spring boot - launch twice with different ports](https://stackoverflow.com/questions/39494633/spring-boot-specify-port-at-the-mapping-level/39496586#39496586) +* [https://stackoverflow.com/questions/26105061/spring-boot-without-the-web-server](https://stackoverflow.com/questions/26105061/spring-boot-without-the-web-server) +* [Spring Boot File Upload / Download with JPA, Hibernate, and MySQL database](https://www.callicoder.com/spring-boot-file-upload-download-jpa-hibernate-mysql-database-example/) \ No newline at end of file diff --git a/src/main/md/Tools/Spring5/spring-boot-security.md b/src/main/md/Tools/Spring5/spring-boot-security.md new file mode 100644 index 00000000..e21de4f7 --- /dev/null +++ b/src/main/md/Tools/Spring5/spring-boot-security.md @@ -0,0 +1,120 @@ +## Security +* Secure the core service +* Add filter that would validate for security token or redirect for authentication +* Add all service interceptor and populate with security token to probagate security to server + * For example, this would probagate security token to servers - UserFeignClientInterceptor implements RequestInterceptor +* spring-boot-autoconfigure-2.1.2.RELEASE.jar contains all the default configuration inside the file spring.factories +* + +## OAuth Token based security +* Add following configuration on Rest Server serivce 
+```xml + + org.springframework.boot + spring-boot-starter-security + + + org.springframework.security.oauth.boot + spring-security-oauth2-autoconfigure + 2.0.5.RELEASE + + + org.springframework.cloud + spring-cloud-security + +``` +* Add following configuration on Rest Server serivce +```properties +zuul.sensitive-headers=Cookie,Set-Cookie +hystrix.shareSecurityContext=true + +security.oauth2.client.client-id=0oadx7zrsjjgIMgQZ0h7 +security.oauth2.client.client-secret=vcBjfdFw9rDUsWGasf0ramwfSu_xHfENWWEinq-y +security.oauth2.client.access-token-uri=https://dev-158606.oktapreview.com/oauth2/default/v1/token +security.oauth2.client.user-authorization-uri=https://dev-158606.oktapreview.com/oauth2/default/v1/authorize +security.oauth2.client.scope=openid profile email +security.oauth2.resource.user-info-uri=https://dev-158606.oktapreview.com/oauth2/default/v1/userinfo +``` +* Add anotation of @EnableOAuth2Sso on Secured applicaton +* Add interceptor to the application +```Java + @Bean + public RequestInterceptor getUserFeignClientInterceptor() { + return new UserFeignClientInterceptor(); + } +``` +* ResourceServerConfig should match with security header that comes in request +* Add SecurityConfig +```java +package com.example.edgeservice; + +import org.springframework.context.annotation.Configuration; +import org.springframework.security.config.annotation.web.builders.HttpSecurity; +import org.springframework.security.oauth2.config.annotation.web.configuration.EnableResourceServer; +import org.springframework.security.oauth2.config.annotation.web.configuration.ResourceServerConfigurerAdapter; +import org.springframework.security.web.util.matcher.RequestHeaderRequestMatcher; + +@Configuration +@EnableResourceServer +public class ResourceServerConfig extends ResourceServerConfigurerAdapter { + + @Override + public void configure(HttpSecurity http) throws Exception { + http + .requestMatcher(new RequestHeaderRequestMatcher("Authorization")) + .authorizeRequests() + .antMatchers("/**").authenticated(); + } +} +``` + +* Add WebSecurityConfigure +```java +import org.springframework.boot.actuate.autoconfigure.security.servlet.EndpointRequest; +import org.springframework.context.annotation.Configuration; +import org.springframework.security.config.annotation.web.builders.HttpSecurity; +import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; + +@Configuration +public class SecurityConfig extends WebSecurityConfigurerAdapter { + + @Override + protected void configure(HttpSecurity http) throws Exception { + http + .authorizeRequests() + .requestMatchers(EndpointRequest.toAnyEndpoint()).hasRole("ADMIN") + .anyRequest().authenticated() + .and() + .httpBasic(); + } +} +``` +* All the service invocation should pass the token to the servers, let us add code to interceptor +```java +package com.example.edgeservice; + +import feign.RequestInterceptor; +import feign.RequestTemplate; +import org.springframework.security.core.Authentication; +import org.springframework.security.core.context.SecurityContext; +import org.springframework.security.core.context.SecurityContextHolder; +import org.springframework.security.oauth2.provider.authentication.OAuth2AuthenticationDetails; +import org.springframework.stereotype.Component; + +@Component +public class UserFeignClientInterceptor implements RequestInterceptor { + private static final String AUTHORIZATION_HEADER = "Authorization"; + private static final String BEARER_TOKEN_TYPE = "Bearer"; + + @Override + public void 
apply(RequestTemplate template) { + SecurityContext securityContext = SecurityContextHolder.getContext(); + Authentication authentication = securityContext.getAuthentication(); + + if (authentication != null && authentication.getDetails() instanceof OAuth2AuthenticationDetails) { + OAuth2AuthenticationDetails details = (OAuth2AuthenticationDetails) authentication.getDetails(); + template.header(AUTHORIZATION_HEADER, String.format("%s %s", BEARER_TOKEN_TYPE, details.getTokenValue())); + } + } +} +``` \ No newline at end of file diff --git a/src/main/md/Tools/Spring5/spring5.md b/src/main/md/Tools/Spring5/spring5.md new file mode 100644 index 00000000..57252b05 --- /dev/null +++ b/src/main/md/Tools/Spring5/spring5.md @@ -0,0 +1,22 @@ +## Spring commands +* mvn spring-boot:run --debug +* -Dspring.profiles.active=dev +* https://start.spring.io/ with "JPA, H2, Web, Lombok" +* -Dspring-boot.run.arguments=--logging.level.org.springframework=TRACE,--logging.level.com.bank.module=TRACE + + +## Known erros +* Unable to find page - Whitelabel Error Page - This application has no explicit mapping for /error, so you are seeing this as a fallback. - Wed Dec 05 19:30:50 SGT 2018 - There was an unexpected error (type=Not Found, status=404). - No message available + * Have you registered the @Controller + +# Blog +* + +# Reading list +* https://docs.spring.io/spring-framework/docs/current/spring-framework-reference/core.html +* https://docs.spring.io/spring-framework/docs/current/spring-framework-reference/testing.html#testing +* https://docs.spring.io/spring-framework/docs/current/spring-framework-reference/data-access.html#spring-data-tier +* https://docs.spring.io/spring-data/commons/docs/2.0.3.RELEASE/reference/html/#projections.interfaces.closed +* https://docs.spring.io/spring-data/elasticsearch/docs/3.1.0.RELEASE/reference/html/ +* https://docs.spring.io/spring-data/jpa/docs/2.1.0.RELEASE/reference/html/ +* https://docs.spring.io/spring-webflow/docs/2.5.0.RELEASE/reference/html/ \ No newline at end of file diff --git a/src/main/md/Tools/Sql.md b/src/main/md/Tools/Sql.md new file mode 100644 index 00000000..2ebf7265 --- /dev/null +++ b/src/main/md/Tools/Sql.md @@ -0,0 +1,101 @@ +Oracle.. +* In Oracle, RowID - physical location id for row. rowid for a row never changes. Rownum changes for every sql resultset. 
+ +|ROWID |ROWNUM | +|:----------------------|------:| +|AAAAECAABAAAAgiAAA |1 | + + +* Select into table +```SQL +insert into ENTL_ENTITY Select 700, 1,'A123456',DESCR,TYPE,STATUS,DATE_CREATED,DATE_MODIFIED,MODIFIED_BY,CREATED_BY,UNIVERSE_ID From ENTL_ENTITY Where ID = 667 + ``` +```SQL + select count(account_number) as cnt, CHH_ID from chh_acct_map where approved_on is not null group by chh_id order by cnt desc +``` +* Select distinct column without using distinct +```SQL +select LENGTH from FILEMETA group by LENGTH +``` +```SQL +SELECT Employee.Name, Department.DeptName FROM Employee, Department WHERE Employee.Dept_ID = Department.Dept_ID; +``` +# Order/Ranking +```SQL +SELECT MIN(Wages) FROM +( + SELECT TOP 3 Wages FROM table ORDER BY Wages DESC; +) As tmp; +``` +```SQL +/*#IInd best mark*/ +select max(a.mark) from student a where a.mark not in (select max(b.mark) from student b) +``` + +```SQL +# Delete duplicate +delete from table_name where rowid not in (select max(rowid) from table group by duplicate_values_field_name); + +delete duplicate_values_field_name dv from table_name ta where rowid <(select min(rowid) from table_name tb where ta.dv=tb.dv); + +DELETE FROM Employee WHERE EmpID NOT IN (SELECT MAX(EmpID) FROM MyTable GROUP BY EmpName) +``` +* Join types are + * self join + * outer join (LEFT, RIGHT), + * cross-join ( SELECT * FROM table1, table2 - product n*m rows returned) + +* SQL should be better practiced in notebook for joins, group by, group by with joins and self Join +* SQL should be supported with knowledge about Index - Clustered and non Clustered" + +* The following summarizes the result of the join operations: + * The result of T1 **INNER JOIN** T2 consists of their paired rows where the join-condition is true. + * The result of T1 **LEFT OUTER JOIN** T2 consists of their paired rows where the join-condition is true and, for each unpaired row of T1, the concatenation of that row with the null row of T2. All columns derived from T2 allow null values.** + * The result of T1 **RIGHT OUTER JOIN** T2 consists of their paired rows where the join-condition is true and, for each unpaired row of T2, the concatenation of that row with the null row of T1. All columns derived from T1 allow null values.** + * The result of T1 **FULL OUTER JOIN** T2 consists of their paired rows and, for each unpaired row of T2, the concatenation of that row with the null row of T1 and, for each unpaired row of T1, the concatenation of that row with the null row of T2. All columns derived from T1 and T2 allow null values.** + + + ```SQL +/*#show all Users that do not have addresses */ +select * from User u +left outer join Address a on u.UserID = a.UserID where a.UserID is null + ``` + +```SQL +/*Having vs Where*/ +/* HAVING specifies a search condition for a group or an aggregate function used in SELECT statement. 
*/ +select City, SUM(Salary) as TotalSalary +from tblEmployee +Where Gender = 'Male' +group by City +Having City = 'London' + +select City, CNT=Count(1) +From Address +Where State = 'MA' +Group By City +Having Count(1)>5 + +SELECT edc_country, COUNT(*) +FROM Ed_Centers +GROUP BY edc_country +HAVING COUNT(*) > 1 +ORDER BY edc_country; +``` + +```SQL +/* With clause query */ +WITH employee AS (SELECT * FROM Employees) +SELECT * FROM employee WHERE ID < 20 +UNION ALL +SELECT * FROM employee WHERE Sex = 'M' +``` + +```SQL +/* multiple with clause query */ +WITH SET1 AS (SELECT SYSDATE FROM DUAL), -- SET1 initialised + SET2 AS (SELECT * FROM SET1) -- SET1 accessed +SELECT * FROM SET2; -- SET2 projected +``` + +# https://en.wikipedia.org/wiki/Correlated_subquery \ No newline at end of file diff --git a/src/main/md/Tools/Transactions.md b/src/main/md/Tools/Transactions.md new file mode 100644 index 00000000..c8b75e7e --- /dev/null +++ b/src/main/md/Tools/Transactions.md @@ -0,0 +1,3 @@ + +# References +* http://jimgray.azurewebsites.net/papers/thetransactionconcept.pdf \ No newline at end of file diff --git a/src/main/md/Tools/Typescript_migration.md b/src/main/md/Tools/Typescript_migration.md new file mode 100644 index 00000000..adbd5b1a --- /dev/null +++ b/src/main/md/Tools/Typescript_migration.md @@ -0,0 +1,6 @@ +* Use strict-mode for full use of typescript +* + +# References +* [Typescript cheatsheet](https://github.com/typescript-cheatsheets/react-typescript-cheatsheet) +* [Writing React with TypeScript](https://dev.to/skurfuerst/writing-react-with-typescript-o5j) \ No newline at end of file diff --git a/src/main/md/Tools/Youdontknow_javascript.md b/src/main/md/Tools/Youdontknow_javascript.md new file mode 100644 index 00000000..4e216459 --- /dev/null +++ b/src/main/md/Tools/Youdontknow_javascript.md @@ -0,0 +1,36 @@ +* If Carnival is related to Car, Javascript is related to Java +* Eccentricities of JS make it interesting +* The understanding of the language is often never attained should be blamed instead of language. +* https://github.com/getify/You-Dont-Know-JS/blob/master/scope%20%26%20closures/ch1.md + +``` +The ability to store values and pull values out of variables is what gives a program state. +Javascript parses and compiles language internally, its compilation shouldn't be compared with traditional compilation +``` + +* Tokenizing/Lexing +* + +``` + function foo(a) { + console.log( a + c ); + } + console.log(c == undefined); + foo( 2 ); // 4 + var c = 2; +``` + +``` +function foo(a) { + console.log( a + b ); + b = a; +} + +foo( 2 ); +``` + + + +# References +* [You-Dont-Know-JS/scope & closures/ch1.md](https://github.com/getify/You-Dont-Know-JS/blob/master/scope%20%26%20closures/ch1.md) +* \ No newline at end of file diff --git a/src/main/md/Tools/beginner_developer_tools.md b/src/main/md/Tools/beginner_developer_tools.md new file mode 100644 index 00000000..a89a8a59 --- /dev/null +++ b/src/main/md/Tools/beginner_developer_tools.md @@ -0,0 +1,35 @@ +# What a beginner developer should practice? 
+ +* Track all below progress using markdown table +* Visual Studio Code +* Markdown + * Learn how to preview continiously using VSCode +* Git - Install git locally and should know minium 20 basic commands + * Ensure github/gilab account is available +* Install git-bash and set-it up in windows PATH + * Copy find command to gfind command, so that it won't conflict with windows find command +* Learn 40 linux commands using git-bash + * awk, grep, sed, find, head, tail, tac, cat, echo, basic shell programs +* Install mysql and document your learning using markdown + * Create table, insert data, query data +* Watch - Google chrome developer tools + * [Google Chrome Developer Tools Crash Course](https://www.youtube.com/watch?v=x4q86IjJFag) + * [What's new in Chrome DevTools (Google I/O '18)](https://www.youtube.com/watch?v=mfuE53x4b3k) +* Learning tips and tricks + * Watch youtube video in twice the speed, but document your learning, and refresh after some interval + * How to search code in github + * How to search code in SO +* Complete reading - https://github.com/MostlyAdequate/mostly-adequate-guide +* Install basic node.js program and execute +* Install basic python, and quick hell-world program +* Read code snippet + * https://github.com/Chalarangelo/30-seconds-of-code + * https://github.com/kriadmin/30-seconds-of-python-code + * https://gist.github.com/thomd/9155311 +* Maven + * https://www.youtube.com/watch?v=zPg5aPjh-sA +* Virtual box + * https://www.youtube.com/watch?v=D1dVhDYAv9E +* Tools + * https://start.spring.io/ + * https://gitignore.io \ No newline at end of file diff --git a/src/main/md/Tools/browswer_tools.md b/src/main/md/Tools/browswer_tools.md new file mode 100644 index 00000000..26211f6c --- /dev/null +++ b/src/main/md/Tools/browswer_tools.md @@ -0,0 +1,4 @@ +* If we need to make javascript as book-markelet, always use it always after encode them as URI + +* Remove fixed element from browswer + * ```javascript:(function(){(function () {var i, elements = document.querySelectorAll('body *');for (i = 0; i < elements.length; i++) {if (getComputedStyle(elements[i]).position === 'fixed') {elements[i].parentNode.removeChild(elements[i]);}}})()})()``` \ No newline at end of file diff --git a/src/main/md/Tools/chrome.md b/src/main/md/Tools/chrome.md new file mode 100644 index 00000000..e9ce601e --- /dev/null +++ b/src/main/md/Tools/chrome.md @@ -0,0 +1,6 @@ +* chrome://chrome-urls/ +* chrome://cast/#devices +* chrome://cast/#devices/192.168.0.101 +* youtube.com?disable_polymer=true +* Open Chrome, and type this in url "data:text/html, " + * Open Chrome, and type this in url "data:text/html, test" \ No newline at end of file diff --git a/src/main/md/Tools/codeSearch.md b/src/main/md/Tools/codeSearch.md new file mode 100644 index 00000000..66e2c6ed --- /dev/null +++ b/src/main/md/Tools/codeSearch.md @@ -0,0 +1,40 @@ +* Quick search operators + * filename:pom.xml + * extension:conf or extension:yaml + * language:scala + * stars:>10 +* function language:javascript +* Search repository containing a word "aws.Polly" but written using Javascript + * aws.Polly language:javascript stars:>10 +* (search "AWS SWF" in file name or in path name) + * AWS SWF in:file,path +* Find project using below libraries on Maven + * spring hibernate spock filename:pom.xml +* Search Akka Stream usage http://doc.akka.io/docs/akka/2.4/scala/stream/stream-io.html#Streaming_File_IO usage + * https://github.com/search?q=FileIO.fromPath+language:Scala&type=Code +* Search code for integration layer + * 
api.github.com language:scala +* Search the configuration, Extension here is file extension name + * ClusterActorRefProvider extension:conf +* Search the respositories, where repo name contains a word course + * course in:name +* Search documentation on github + * https://github.com/search?q=hot+module+reload+express+js&type=Wikis +* Search document without a word + * hot module reload express js NOT react +* Search a sample configuration for babel-preset-env within any of package.json + * babel-preset-env filename:package.json created:>2017-12-31 +* Search the repo that uses puppeteer and fs module + * fs puppeteer extension:js language:js + * use sort by using "recenly indexed" + +# Famous code searchs +* [Javascript courses = course in:name language:javascript forks:>1000](https://github.com/search?q=course+in%3Aname+language%3Ajavascript+forks%3A%3E1000) +* [java courses = course in:name language:java forks:>1000](https://github.com/search?q=course+in%3Aname+language%3Ajava+forks%3A%3E1000) + +## References +* https://github.com/search +* https://github.com/search/advanced +* https://help.github.com/articles/searching-code/ +* https://gist.github.com/search + diff --git a/src/main/md/Tools/code_read.md b/src/main/md/Tools/code_read.md new file mode 100644 index 00000000..09d4f7be --- /dev/null +++ b/src/main/md/Tools/code_read.md @@ -0,0 +1,15 @@ +## Cod reading + +## Tools +* [SchemaSpy](http://schemaspy.sourceforge.net/) +* [Use java class visualizer](http://www.class-visualizer.net/) + + +## References +* [how_do_i_start_understanding_a_humongous_codebase](https://www.reddit.com/r/learnprogramming/comments/3ebgy3/how_do_i_start_understanding_a_humongous_codebase/) +* [software_engineeringdesign_how_do_you_approach](https://www.reddit.com/r/learnprogramming/comments/1ph1a7/software_engineeringdesign_how_do_you_approach/) +* [learning_to_use_a_large_library](https://www.reddit.com/r/learnprogramming/comments/13evyx/learning_to_use_a_large_library/) +* [how_to_learn_a_large_code_base](https://www.reddit.com/r/learnprogramming/comments/1qjffk/how_to_learn_a_large_code_base/) +* [advice_for_jumping_into_an_already_built_code_base](https://www.reddit.com/r/webdev/comments/9ca15v/advice_for_jumping_into_an_already_built_code_base/) +* [how_to_learn_a_new_codebase_quickly](https://www.reddit.com/r/learnprogramming/comments/89pjdy/how_to_learn_a_new_codebase_quickly/) +* [my_coding_skills_is_just_me_memorizing_code_what](https://www.reddit.com/r/webdev/comments/8hw1zf/my_coding_skills_is_just_me_memorizing_code_what/) diff --git a/src/main/md/Tools/css_flexbox.md b/src/main/md/Tools/css_flexbox.md new file mode 100644 index 00000000..7bf45f28 --- /dev/null +++ b/src/main/md/Tools/css_flexbox.md @@ -0,0 +1,21 @@ +* [CSS Grid layout vs Flexbox](https://medium.com/youstart-labs/beginners-guide-to-choose-between-css-grid-and-flexbox-783005dd2412) + +## CSS Online tools for Generator +* [CSS Grid Genertor](https://cssgrid-generator.netlify.com/) +* [An interactive CSS Grid generator](https://grid.layoutit.com/) +* [An interactive border radius](https://9elements.github.io/fancy-border-radius/) +* [CSS Grid Layout Generator](https://css-grid-builder.octrace.pro/#grid) + +## CSS Online tools for Flexbox +* [Flexbox Generator](https://loading.io/flexbox/) +* [flexbox playground and code generator](https://the-echoplex.net/flexyboxes/) +* [TEST CSS FLEXBOX RULES](https://flexbox.help/) +* [BUILD WITH FLEXBOX](http://flexbox.buildwithreact.com/) +* +## Flexbox +* [Flxexbox cheat 
sheet](https://www.reddit.com/r/css/comments/bdvp7j/flexbox_visual_cheat_sheet/) +* [A Complete Guide to Flexbox](https://css-tricks.com/snippets/css/a-guide-to-flexbox/) +* [Flexbox - CodePen](https://codepen.io/enxaneta/pen/adLPwv) +* [Flexbox - Visual Cheat Sheet](https://i.imgur.com/0drn44G.png) +* [Flexbox visual dictionary](http://www.csstutorial.org/flex-both.html) + diff --git a/src/main/md/Tools/curl.md b/src/main/md/Tools/curl.md new file mode 100644 index 00000000..92cbba16 --- /dev/null +++ b/src/main/md/Tools/curl.md @@ -0,0 +1,18 @@ +* curl is library libcurl and command line tool + * can be used to test communication for many protocols +* **curl can't recursively download files, so it can't mirror a site to remote location. If we need it, we have to make use of wget** +* curl supports FTP, FTPS, Gopher, HTTP, HTTPS, SCP, SFTP, TFTP, Telnet, DICT, LDAP, LDAPS, IMAP, POP3, SMTP, RTSP and URI +* curl command line options + * -o output to a file + * -L follow the redirect +* curl usage + * curl -X ':///?' -d '' + * curl --cookie "name=xyz" --referer -H "Accept: application/json" -H "Content-Type: application/json" http://hostname/resource + * curl --cookie "name=xyz" --referer --header "Accept: application/json" http://hostname/resource + * curl --data "param1=value1¶m2=value2" http://hostname/resource + * curl --form "fileupload=@filename.txt" http://hostname/resource + * curl -X POST -d @jsonFile http://hostname:9200/_search + * wget --no-parent -r http://WEBSITE.com/DIRECTORY (only stuff curl can't do, downloadin recusive) + +# Reference +* [Curl SO](https://stackoverflow.com/questions/356705/how-to-send-a-header-using-a-http-request-through-a-curl-call) \ No newline at end of file diff --git a/src/main/md/Tools/deduplicate_matching_algorithm.md b/src/main/md/Tools/deduplicate_matching_algorithm.md new file mode 100644 index 00000000..9bde13ab --- /dev/null +++ b/src/main/md/Tools/deduplicate_matching_algorithm.md @@ -0,0 +1 @@ +https://mailinator.blogspot.com/2012/02/how-mailinator-compresses-email-by-90.html \ No newline at end of file diff --git a/src/main/md/Tools/distributed_computing.md b/src/main/md/Tools/distributed_computing.md new file mode 100644 index 00000000..25c1d96b --- /dev/null +++ b/src/main/md/Tools/distributed_computing.md @@ -0,0 +1,5 @@ +* [Why distributed systems are so hard](https://drive.google.com/file/d/15nxAaVXZwNFnJNVvgtKonNbzxNgTUCxP/view) + +# Distributed systems failure - Consequences +* You sell more quantity available in stock due to parallel decrement +* More tickets are sold \ No newline at end of file diff --git a/src/main/md/Tools/docker.md b/src/main/md/Tools/docker.md new file mode 100644 index 00000000..d44ab7c9 --- /dev/null +++ b/src/main/md/Tools/docker.md @@ -0,0 +1,168 @@ +## Docker theory +* The difference between a Docker image (class) and a Docker container (object) is the same as that of the difference between a class and an object. + +```bash +#!/bin/bash +# to run docker container -p host_port:container_port.. 
http://localhost:host_port/ +docker container run -p 9999:8888 YOUR_DOCKER_ID/myhello +# stop all containers +docker stop $(docker ps -aq) +# Delete all containers +docker rm $(docker ps -a -q) +# Delete all images +docker rmi $(docker images -q) +# Start a mongodb +docker run --restart unless-stopped --name mongo -d -p 0.0.0.0:27017:27017 mongo:3.4.18 +# Start a mysql +docker run --restart unless-stopped --name mysql -e MYSQL_ROOT_PASSWORD=root -p 0.0.0.0:31306:3306 -d mysql:8.0.14 +# inspect image +docker inspect $image_id_cc126d830f47 +# Bash as deamon process +docker run -it -d -p 8000:8000 -p 8002:8002 busybox bin/bash +``` + + +# Delete All Exited Containers (Linux): Or remove with certain image +```shell +docker rm $(docker ps -q -f status=exited) +docker images | grep "pattern" | awk '{print $1}' | xargs docker rm +``` + +# Docker (container using daemon process) for mvn remote debugging + +```bash +docker pull maven:3.6.3-jdk-8 +docker run -it -d -p 8000:8000 -p 8002:8002 maven:3.6.3-jdk-8 bin/bash +``` + +# Docker Scala JDK remote debug, Scala, sbt and ammonite + +```bash +sh -c '(echo "#!/usr/bin/env sh" && curl -L https://github.com/lihaoyi/Ammonite/releases/download/2.0.4/2.13-2.0.4) > /usr/local/bin/amm && chmod +x /usr/local/bin/amm' && amm +echo "deb https://dl.bintray.com/sbt/debian /" | tee -a /etc/apt/sources.list.d/sbt.list +curl -sL "https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x2EE0EA64E40A89B84B2DF73499E82A75642AC823" | apt-key add +apt-get update +apt-get install sbt + +apt-get remove scala-library scala +wget www.scala-lang.org/files/archive/scala-2.11.8.deb +dpkg -i scala-2.11.8.deb + +curl -L -o sbt.deb http://dl.bintray.com/sbt/debian/sbt-0.13.15.deb +dpkg -i sbt.deb +apt-get update +apt-get install sbt + +mvn -Dmaven.surefire.debug test +mvn -Dmaven.surefire.debug="-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 -Xnoagent -Djava.compiler=NONE" test +``` + + +# Delete All Dangling Images +```shell +docker rmi $(docker images -f dangling=true -q) +``` + +## Docker based mongodb +```bash +docker-compose -f src/main/docker/mongodb.yml up +docker run --name mongo -d mongo:3.4.18 +``` +## Docker based Cassandra +```bash +docker run --name some-cassandra -p 9042:9042 -p 7000:7000 --network host -d cassandra:latest +``` +## docker image elasticsearch +```bash +docker run -d --name elasticsearch -p 9200:9200 -p 9300:9300 -e "discovery.type=single-node" elasticsearch:6.8.6 +docker run -p 9200:9200 -p 9600:9600 -e "discovery.type=single-node" amazon/opendistro-for-elasticsearch:1.3.0 +``` + +## docker image build +```bash +docker image build -t imagename:version . +``` + +## Docker machine on Windows + +```bash +docker-machine ls +docker-machine rm default +docker-machine create -d virtualbox default +docker-machine start +docker-machine stop +docker-machine upgrade +docker-machine ssh +docker-machine ip +# C:\Users\nikia>docker-machine ip 192.168.99.101 +# Host ip IPv4 Address. . . . . . . . . . . 
: 192.168.0.109 +docker run --name some-cassandra -p 9042:9042 -p 7000:7000 --network host -d cassandra:latest +docker exec -it some-cassandra sh +``` + +## Docker DSE Cassandra + +```bsh +docker pull datastax/dse-server +docker pull datastax/dse-opscenter +docker pull datastax/dse-studio +# Start opscenter first and later link it dse-server +docker run -e DS_LICENSE=accept -9 8888:8888 -name my-opscenter -d datastax/dse-opscenter +docker run -e DS_LICENSE=accept -9 8888:8888 --link my-opscenter:opscenter -name my-dse -d datastax/dse-server +docker run -e DS_LICENSE=accept -9 9091:9091 --link my-dse -name my-studio -d datastax/dse-studio +docker exec -it my-dse cqlsh ip_address +``` + +### Docker PGSQL +``` +docker run --name postgres -e POSTGRES_PASSWORD=pgSecretProd26 -d -p 5432:5432 postgres +``` + +## If docker command is not connecting to docker-machine + +* Pay attention to environment variables + +```pre + DOCKER_CERT_PATH=C:\Users\nikias\.docker\machine\machines\default + DOCKER_HOST=tcp://192.168.99.111:2376 + DOCKER_MACHINE_NAME=default + DOCKER_TLS_VERIFY=1 + DOCKER_TOOLBOX_INSTALL_PATH=D:\Apps\Docker Toolbox +``` + +```bat +docker-machine env default +@FOR /f "tokens=*" %i IN ('docker-machine env default') DO @%i +``` + +## docker network + +```bash +$docker network ls +NETWORK ID NAME DRIVER SCOPE +713e294d5638 bridge bridge local +34d058f03720 host host local + +docker pull alpine +docker run -itd -network 713e294d5638 --name=alpine1 alpine +docker run -itd -network 713e294d5638 --name=alpine2 alpine +docker network inspect 713e294d5638 --format '{{ .Containers }}' +docker exec -it alpine1 ping 172.17.0.3 +#Above command worked +$ docker exec -it alpine1 ping alpine2 +ping: bad address 'alpine2' + +## Creating custom network (to ping using container-name) +docker network create --driver=bridge javahome +docker run -itd --network javahome --name=alpine1 alpine +docker run -itd --network javahome --name=alpine2 alpine +docker exec -it alpine1 ping alpine2 +``` + +## To connect to docker service from Windows +* Enable NAT and port-warding +* Add port forwarding like 7042 without host-ip and guest-ip +* docker-machine ip (use output of this ip to connect from the application) + +## $Reference +* [Docker samples](https://docs.docker.com/samples/) diff --git a/src/main/md/Tools/elasticsearch.md b/src/main/md/Tools/elasticsearch.md new file mode 100644 index 00000000..54d8a3ec --- /dev/null +++ b/src/main/md/Tools/elasticsearch.md @@ -0,0 +1,178 @@ +# ElasticSearch continious Query +* https://qbox.io/blog/elasticsesarch-percolator +* https://stackoverflow.com/questions/21536599/what-does-percolator-mean-do-in-elasticsearch +* [Robert Muir](https://www.elastic.co/cn/blog/author/robert-muir) +* https://sematext.com/blog/top-10-elasticsearch-mistakes/ +* https://www.elastic.co/guide/en/elasticsearch/reference/current/getting-started.html +* *ElasticSearch Get can accept body* + +# Getting Started Elastic + +```bash + export JAVA_HOME=/C/Apps/Java/jdk1.8.0_181/ + curl -L -O https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-6.4.1.zip + unzip elasticsearch-6.4.1.zip + cd elasticsearch-6.4.1 + bin/elasticsearch-plugin.bat install x-pack (by default x-pack is available) + wget -L http://127.0.0.1:9200/ + curl -XGET http://localhost:9200/_xpack + curl -XGET http://localhost:9200/_cluster/health + curl -XPOST 'http://localhost:9200/_shutdown' +``` + +# X-Pack +``` +http://localhost:9200/_xpack +http://localhost:9200/_xpack?categories=build,features +``` + +# Getting Started 
Kibana + +```bash + export JAVA_HOME=/C/Apps/Java/jdk1.8.0_181/ + curl -L -O https://artifacts.elastic.co/downloads/kibana/kibana-6.4.1-windows-x86_64.zip + unzip kibana-6.4.1-windows-x86_64.zip + cd kibana-6.4.1-windows-x86_64 + wget -L http://localhost:5601/ +``` + +# Features +* ElasticSearch would return result even if partially matches unlike RDBMS - Relevance +* Two client Java API + * Node client - The node client joins a local cluster as a non data node. In other words, it doesn�t hold any data itself, but it knows what data lives on which node in the cluster, and can forward requests directly to the correct node. + * Transport client - The lighter-weight transport client can be used to send requests to a remote cluster. It doesn�t join the cluster itself, but simply forwards requests to a node in the cluster. + + +# Theory +* In Elasticsearch, all data in every field is indexed by default. ES can use all of those inverted indices in the same query, to return results at breathtaking speed. +* A node is a running instance of Elasticsearch +* Documents can have their own _id or let Elasticsearch generate one for them. +* While a cluster consists of one or more nodes with the same cluster.name that are working together to share their data and workload. +* One node in the cluster is elected to be the master node, which is in charge of managing cluster-wide changes like creating or deleting an index, or adding or removing a node from the cluster. + * The master node does not need to be involved in document-level changes or searches +* An index is just a logical namespace that points to one or more physical shards. +* A shard is a low-level worker unit that holds just a slice of all the data in the index. +* As your cluster grows or shrinks, Elasticsearch will automatically migrate shards between nodes so that the cluster remains balanced +* Multiple nodes can share the same directory. +* As long as the second node has the same cluster.name as the first node (see the ./config/elasticsearch.yml file), it should automatically discover and join the cluster run by the first node. +* The more copies of data that you have, the more search throughput you can handle +* green - All primary and replica shards are active., yellow - All primary shards are active, but not all replica shards are active., red - Not all primary shards are active. +* When document deleted the _version number has been incremented. This is part of the internal bookkeeping, which ensures that changes are applied in the correct order across multiple nodes. +* ES uses - Optimistic Concurrency Control - Elasticsearch needs a way of ensuring that an older version of a document never overwrites a newer version. +* ES update = retrieve-change-reindex process (within the shard) +* ES Scripts - Scripts can be passed in as part of the request, retrieved from the special .scripts index, or loaded from disk. +* shard = hash(routing) % number_of_primary_shards +* There is no special mapping required for arrays. Any field can contain zero, one, or more values, in the same way as a full-text field is analyzed to produce multiple terms. + * all the values of an array must be of the same datatype - Elasticsearch will use the datatype of the first value in the array to determine the type of the new field. +* Empty values + * "null_value": null, "empty_array": [] and "array_with_null_value": [ null ] +* Lucene doesn�t understand inner objects. A Lucene document consists of a flat list of key-value pairs. 
In order for Elasticsearch to index inner objects usefully, it converts our document into something like (flattened object) + * {"tweet":["elasticsearch","flexible","very"],"user.id":["@johnsmith"],"user.gender":["male"],"user.age":[26],"user.name.full":["john","smith"],"user.name.first":["john"],"user.name.last":["smith"]} +* To distinguish between two fields that have the same name, we can use the full path (for example, user.name.first) or even the type name plus the path (tweet.user.name.first). +* ```{"followers":[{"age":35,"name":"Mary White"},{"age":26,"name":"Alex Jones"},{"age":19,"name":"Lisa Smith"}]}``` would be convereted to ```{"followers.age":[19,26,35],"followers.name":["alex","jones","lisa","smith","mary","white"]}``` + * This is sufficient for us to ask, �Is there a follower who is 26 years old?� + * We can�t get an accurate answer to this: �Is there a follower who is 26 years old and who is called Alex Jones?� + +# Type +* Lucene has no concept of document types. The type name of each document is stored with the document in a metadata field called _type. +* Lucene also has no concept of mappings. Mappings are the layer that Elasticsearch uses to map complex JSON documents into the simple flat documents that Lucene expects to receive. +* If different type has similar field, analyzer would be same, to avoid, we can change field name slightly different + * title_en, title_es instead of title +* + +# Query Theory +* You can find only terms that exist in your index, so both the indexed text and the query string must be normalized into the same form. +* Process of tokenization and normalization is called analysis + * First, tokenizing a block of text into individual terms suitable for use in an inverted index, + * Then normalizing these terms into a standard form to improve their �searchability,� or recall +* Standard analyzer + * Character filters, Tokenizer and Tokenizer filter (a, and the) +* When we query an exact-value field, the query will not analyze the query string, but instead search for the exact value that you have specified. +* String: string, Whole number: byte, short, integer, long, Floating-point: float, double, Boolean: boolean and Date: date +* mapping contains one of - analyzed, not_analyzed and no (don't index this field) +* Default field type is string, string types are analyzed (transformed) +* Other types are not analzyed +* + +# Type and mappings +* _type, _id, _type, _version, _source +* Every type has its own mapping or schema definition, which defines the data structure for documents of that type, much like the columns in a database table. Documents of all types can be stored in the same index, but the mapping for the type tells Elasticsearch how the data in each document should be indexed. +* If your main database already has version numbers�or a value such as timestamp that can be used as a version number. version_type=external +* timeout, sync (disable for performance) +* _all - concatenated field +* You can always update partial mappings without explicitly mention about existing fields +* Same field can be analyzed and not_analyzed using mappings. (both raw search and full-text search) +* * _id and _index fields are neither indexed nor stored + +# TF/IDF +* Term frequency - How often does the term appear in the field? The more often, the more relevant. A field containing five mentions of the same term is more likely to be relevant than a field containing just one mention. 
  * How many times did the term honeymoon appear in the tweet field in this document?
* Inverse document frequency - How often does each term appear in the index? The more often, the less relevant. Terms that appear in many documents have a lower weight than more-uncommon terms.
  * How many times did the term honeymoon appear in the tweet field of all documents in the index?
* Field-length norm - How long is the field? The longer it is, the less likely it is that words in the field will be relevant. A term appearing in a short title field carries more weight than the same term appearing in a long content field.
  * How long is the tweet field in this document? The longer the field, the smaller this number.

# Performance considerations
* To make sorting efficient, Elasticsearch loads all the values for the field that you want to sort on into memory. This is referred to as fielddata.
* Elasticsearch doesn't just load the values for the documents that matched a particular query. It loads the values from every document in your index, regardless of the document type.
* query-then-fetch is the two-phase process Elasticsearch uses for searching; internally it uses a priority queue. Avoid deep paging unless crawlers/spiders must be supported.

# Analyzer
* We can create a new analyzer or compose existing ones
* An analyzer can be tested independently with sample text
* The analyzer is not much use unless we tell Elasticsearch where to use it.
* Analyzer = char_filter + tokenizer + token filter, composed and registered as a named analyzer

# Settings
* action.auto_create_index: false
* number_of_shards, number_of_replicas - the two most important settings
* By default, the stopwords filter is disabled.
  * GET /spanish_docs/_analyze?analyzer=es_std
* "date_detection": false -- inside mappings
* dynamic_date_formats
* dynamic_templates
* The _default_ mapping is a good place to specify index-wide dynamic templates.
* elasticsearch/config/jvm.options (also present on Windows) is where the Elasticsearch stack size can be modified

# REINDEXING
* Reindex with the latest mapping using scroll and scan
* GET /old_index/_search?search_type=scan&scroll=1m
* Break a big reindex down into smaller jobs by filtering on a date or timestamp field
* While reindexing, also filter on a date field to match only documents added after the last reindex started; such documents are not visible to scroll and scan, since scroll works on a snapshot view
* Index aliases and zero downtime - use an alias to hide the reindex process
* Alias use cases
  * Switch transparently between one index and another on a running cluster
  * Group multiple indices (for example, last_three_months)
  * Create "views" on a subset of the documents in an index
* Prefer index names with a version number and hide the version behind an alias

# Weird Problems
* BOUNCING RESULTS - If two documents have the same score, two shards may list them in different orders; query the same shards consistently (for example, by setting a preference per user) to avoid surprises
* The timeout parameter tells the coordinating node how long it should wait before giving up and just returning the results that it already has.
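The analyzer notes above are easier to see with a concrete call. Below is a minimal, illustrative sketch (not taken from these notes) that creates an index with a custom analyzer (char_filter + tokenizer + token filter) and then tests it independently via the `_analyze` endpoint. It assumes an Elasticsearch 6.x node on localhost:9200 and Java 11+ for `java.net.http`; the index name `my_docs` and analyzer name `my_html_analyzer` are made up for the example.

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class AnalyzerDemo {

    private static final String ES = "http://localhost:9200"; // assumed local node

    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();

        // Create an index whose custom analyzer strips HTML, tokenizes with the
        // standard tokenizer, and then lowercases and removes English stopwords.
        String settings = "{"
                + "\"settings\": {\"analysis\": {"
                + "  \"analyzer\": {\"my_html_analyzer\": {"
                + "    \"type\": \"custom\","
                + "    \"char_filter\": [\"html_strip\"],"
                + "    \"tokenizer\": \"standard\","
                + "    \"filter\": [\"lowercase\", \"stop\"]"
                + "}}}}}";
        send(client, HttpRequest.newBuilder(URI.create(ES + "/my_docs"))
                .header("Content-Type", "application/json")
                .PUT(HttpRequest.BodyPublishers.ofString(settings))
                .build());

        // Test the analyzer independently with sample text.
        String analyze = "{\"analyzer\": \"my_html_analyzer\", \"text\": \"<p>Some HTML Text</p>\"}";
        send(client, HttpRequest.newBuilder(URI.create(ES + "/my_docs/_analyze"))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(analyze))
                .build());
    }

    private static void send(HttpClient client, HttpRequest request) throws Exception {
        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
    }
}
```

The same two requests can of course be issued with curl; the Java version is only meant to show that the analyzer definition lives in the index settings while `_analyze` lets you exercise it before any documents are indexed.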
+ +## Default ports +* Kibana - http://localhost:5601/app/kibana#/home?_g=() +* ElasticSearch - http://127.0.0.1:9200/_search + +# Plugins +bin/elasticsearch-plugin install http://some.domain/path/to/plugin.zip +bin/elasticsearch-plugin install file:///C:/path/to/plugin.zip + +# Comparision +* Relational DB ⇒ Databases ⇒ Tables ⇒ Rows ⇒ Columns +* Elasticsearch ⇒ Indices ⇒ Types ⇒ Documents ⇒ Fields +* Relational DB ⇒ B-Tree ⇒ Index +* Elasticsearch ⇒ Inverted index ⇒ Index + + +# ElasticSearch Queries +``` +GET /megacorp/employee/_search?q=last_name:Smith + +``` + +# https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting.html +# [Elasticsearch Blogs](https://www.elastic.co/blog) +# [This Week in Elasticsearch and Apache Lucene](https://www.elastic.co/blog/this-week-in-elasticsearch-and-apache-lucene-2019-01-11) diff --git a/src/main/md/Tools/gitCodeSearch.md b/src/main/md/Tools/gitCodeSearch.md new file mode 100644 index 00000000..1c24932b --- /dev/null +++ b/src/main/md/Tools/gitCodeSearch.md @@ -0,0 +1,10 @@ +* git clone https://github.com/camunda/camunda-bpm-webapp.git +* git grep -2 spring /* details of the search result, with prior 2 line and following 2 lines as context */ +* git grep -c spring /* summary of search with counts */ +* git grep -e ProcessDefinitionDto --and -e result /* search for the word ProcessDefinitionDto followed by result in same line*/ +* If we need to find *When* a term started come to existence, use git log search + * git log -S queryParameter --oneline + * git log -p -S queryParameter /* search the word in the commit diff */ + * git log --all --grep='queryParameter' /* search the word across all branch in commit*/ + * git grep queryParameter $(git rev-list --all) /* Might throw Argument list too long */ + * git rev-list --all | xargs git grep queryParameter \ No newline at end of file diff --git a/src/main/md/Tools/github.md b/src/main/md/Tools/github.md new file mode 100644 index 00000000..47ed05e3 --- /dev/null +++ b/src/main/md/Tools/github.md @@ -0,0 +1,14 @@ + +# To preview github html source as normal html + * Append the url after http://htmlpreview.github.io/? 
+ * http://htmlpreview.github.io/?https://github.com/thomaspark/bootswatch/blob/v3.3.5/cosmo/index.html + * http://htmlpreview.github.io/?https://github.com/mohanmca/MohanLearningGround/blob/master/src/main/webapp/random_numbers.html + +# Sample message for every repository +…or create a new repository on the command line +echo "# DroolsPOC" >> README.md +git init +git add README.md +git commit -m "first commit" +git remote add origin https://github.com/mohanmca/DroolsPOC.git +git push -u origin master \ No newline at end of file diff --git a/src/main/md/Tools/graal_jvm.md b/src/main/md/Tools/graal_jvm.md new file mode 100644 index 00000000..77a1e4cd --- /dev/null +++ b/src/main/md/Tools/graal_jvm.md @@ -0,0 +1,129 @@ +# Graal +* JIT with API (Truffle layer) +* Modular JIT compiler + * https://github.com/oracle/graal/tree/master/compiler/src + * core, debug, hotspot, api, asm + * loop, phases, truffle + * amd64, sparc +* No circular dependency +* Better inlining and escape analysis +* Graal on JDK-8 requires bootstrap, as graal itself written in Java, it should be compiled and hotspot JVM should compile +* Graal on JDK-9 was already compiled using AOT, hence it doesn't require bootstrap +* PS Scavange cycles are 2.5% better for than non Graal vms for twitter (it is due to escape analysis) +* Old gen are 40MB more than non Graal vms for twitter (it is due to Graal itself requires additional RAM on JDK8) +* Graal uses 11% lesser CPU than C2 Hotspot. i.e $127 saving per CPU. +* java -XX:+UnlockExperimentalVMOptions -XX:+EnableJVMCI -XX:+UseJVMCICompiler => can unlock GraalVM on JDK9 +* Twitter observed lots of performance improvement due to Scala usage, and they suspect Java may not get 10% of improvement, since Scala polymorphism is complext than Java +* Enterprise graal was giving 22% improvement compared to Open source Graal + * It is doing better inline, and better escape analysis + * It is not free + +# How Graal works +* Polyglot JIT VM + * Languages are implemented again to suit Graal-JIT-VM + * Call from Ruby to JS, and return would be within GraalVM + * No performance punishment +* Graal :: function(bytecode) => Machinecode +* Graal :: JIT (+ JIT Library) +* Truffle is a layer on top of Grall, and provides lots of interfaces for other languages to use JIT +* Prior to Graal, Host language compiled into bytecode, and bytecode at runtime compiled into machine code, and host langauge itself can't access JIT compiler (one way) +* In Graal, language can interact with JIT (two way) using Truffle, and change the parameters of optimization +* Works using technique called "AST self specialization" + * A node starts as generic type with un-initialized stated + * Over time its type becomes specialized, and compiled to optimized machine code + * Compilation using Partial evaluation produces Machine code + * Partial evaluation + * Take all the java code that interprets host language + * Inline the above with the host-language specialized AST tree + * Produce optimized machine code (Tom Stuart - https://www.youtube.com/watch?v=n_k6O50Nd-4) +* Using truffle we can deoptimize the machine code and deconstruct the AST + * It is required for languages like JS, Ruby are so dynamic, anything could happen there +* Cross language AST can be combined + * If ruby method often calls JS + * JS method is cloned and inlined into Ruby methods AST + + +# Truffle +* Infrastructure for languages to interoperate +* All the lanugages are implemented using common interface inside VM +* So two different language can 
invoke each other, since they are implemented on the same common infrastructure

```ruby
Truffle::Interop.eval('application/javascript'," 
function add(a,b) { return a + b; }
Interop.export('add',add.bind(this));
")
add = Truffle::Interop.import('add');
puts add.call(42,24)
```

# Hotspot JVM

* Java HotSpot VM
  * Just-in-time (JIT) compiler
  * Bytecodes are converted into highly optimized machine code

* When the HotSpot JIT kicks in
  * A method becomes eligible for compilation once it has been invoked many times; it is then scheduled for compilation into machine code
  * Compilation happens on a separate thread, in parallel with continued execution of the not-yet-compiled method
  * -XX:+PrintCompilation can be used to see the compilation details

* HotSpot optimizations
  * Java HotSpot VM will try to inline methods that contain less than 35 bytes of JVM bytecode.
  * Java HotSpot VM optimizes method dispatch based on the observed receiver types
    * The method call resolves to only one implementation
    * There is only a single observed type per call site (i.e. the call site is "monomorphic")
    * There are two observed types per call site (i.e. the call site is "bimorphic")
    * This eliminates the overhead of doing a virtual method lookup
  * Loop optimization, type sharpening, dead-code elimination, and intrinsics

* Three modes
  * In the Java HotSpot VM there are actually two separate JIT compilers
  * C1 is used for applications where quick startup and rock-solid optimization are required
    * GUI applications
    * -client mode
  * C2 was originally intended for long-running, predominantly server-side applications
    * -server mode
  * Tiered compilation
    * Start with C1
    * Switch to C2
    * Default mode of Java SE 8

* Logging the HotSpot JVM
  * -XX:+UnlockDiagnosticVMOptions -XX:+LogCompilation
    * Produces a hotspot_pid*.log file
  * -XX:+UnlockDiagnosticVMOptions -XX:+LogCompilation -XX:LogFile=MyProcHSJVM.log
    * Customizes the log file name

# Hotspot JVM Issues

* Old and complex
* Old code base
* No real optimization in the last two years (as of 2018)

# Other

* Java 9 introduced compact strings
  * Many characters require 16 bits to represent them, but statistically most require only 8 bits - the LATIN-1 character representation
  * private final byte coder; static final byte LATIN1 = 0; static final byte UTF16 = 1; //in java.lang.String
  * coder can be either LATIN1 or UTF16
  * java -XX:+UnlockExperimentalVMOptions -XX:+EnableJVMCI -XX:+UseJVMCICompiler => can unlock GraalVM on JDK9

# Tools

* Heapster provides an agent library to do heap profiling for JVM processes with output compatible with Google perftools. The goal of Heapster is to be able to do meaningful (sampled) heap profiling in a production setting.
* gperftools is a collection of a high-performance multi-threaded malloc() implementation, plus some pretty nifty performance analysis tools.
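To see the invocation-threshold and inlining behaviour described above, a tiny hot loop is enough. The class below is a hypothetical demo (its name, loop count, and printed output are arbitrary, not from any of the referenced talks); run it with `-XX:+PrintCompilation`, and optionally `-XX:+UnlockDiagnosticVMOptions -XX:+PrintInlining`, to watch `square` get compiled and inlined once it becomes hot.

```java
/**
 * Hypothetical demo for observing HotSpot JIT compilation.
 *
 * Run with:
 *   java -XX:+PrintCompilation JitCompilationDemo
 *   java -XX:+UnlockDiagnosticVMOptions -XX:+PrintInlining JitCompilationDemo
 */
public class JitCompilationDemo {

    // A tiny method (well under the ~35-bytecode inlining threshold),
    // called monomorphically from a single call site in the loop below.
    static long square(long x) {
        return x * x;
    }

    public static void main(String[] args) {
        long sum = 0;
        // Enough iterations to cross the C1/C2 invocation thresholds under tiered compilation.
        for (long i = 0; i < 1_000_000; i++) {
            sum += square(i);
        }
        // Print the result so the loop cannot be optimized away entirely.
        System.out.println("sum = " + sum);
    }
}
```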
+ + +# References + +* [GeeCON Prague 2017: Chris Thalinger - Twitter's quest for a wholly Graal runtime](https://www.youtube.com/watch?v=pR5NDkIZBOA) +* [Graal: High-Performance Polyglot Runtime by Thomas Wuerthinger and Aleksandar Prokopec](https://www.youtube.com/watch?v=TQMKPRc6cbE +* [Compilers For Free by Tom Stuart](https://www.youtube.com/watch?v=n_k6O50Nd-4)) +* https://www.slideshare.net/ThomasWuerthinger/graal-truffle-ethdec2013 +* https://shipilev.net/blog/2015/black-magic-method-dispatch/#_monomorphic_cases +* http://www.oracle.com/technetwork/articles/java/architect-evans-pt1-2266278.html +* https://github.com/gperftools/gperftools +* https://github.com/mariusae/heapster +* http://www.baeldung.com/java-9-compact-string +* [One VM to Rule Them All, One VM to Bind Them](https://www.youtube.com/watch?time_continue=1959&v=FJY96_6Y3a4) diff --git a/src/main/md/Tools/intellij_keys.md b/src/main/md/Tools/intellij_keys.md new file mode 100644 index 00000000..655c0190 --- /dev/null +++ b/src/main/md/Tools/intellij_keys.md @@ -0,0 +1,8 @@ +* Show hierarchy of a class - Ctl + H +* Format Code - Ctl + Alt + L +* Maximize - Ctl + Shift + F12 +* +# Code generation +``` +psvm - main methods +``` \ No newline at end of file diff --git a/src/main/md/Tools/jenkins/Jenkins.md b/src/main/md/Tools/jenkins/Jenkins.md new file mode 100644 index 00000000..7670cf68 --- /dev/null +++ b/src/main/md/Tools/jenkins/Jenkins.md @@ -0,0 +1,182 @@ +* Jenkins - Build pipeline tool +* [Alternative to Jenkins](https://www.slant.co/options/2477/alternatives/~jenkins-alternatives) +* (https://jenkins.io/doc/book/pipeline/) +* Jenkins tasks + * Cron on steroids + * Automate Mundanity + * Dev to Production + * Continious Integration + * Continious Delivery + * Reliable, fast and feedback via mail +* Jenkins configuration and commands + * /home/nikias/.jenkins/secrets/initialAdminPassword + * D:\Apps\jenkins-2.157\secrets\initialAdminPassword + * http://localhost:8080/ +* Jenkins History + * CruiseControl - 2001 - XML + * Hudson - Kohsuke Kawaguchi @SUN - 2004 + * Hudons first release @SUN - 2005 + * Oracle acquires Sun - 2009 - + * Oracle retains Hudson - 2011 + * Jenkins - 2011 + * CloudBees - Jenkins SAAS - 2014 + * CloudBees - Jenkins2 - 2016 +* Jenkins Security + * Jenkins > Configure Global Security > Access Control > Security Realm + * Jenkins > Configure Global Security > Access Control > Authorization + * Jenkins > Configure Global Security > Authorization > Allow anonymous read access + * Jenkins > Manage > Manager Users > Create User + +* Jenkins using docker +``` +https://hub.docker.com/_/jenkins +docker run -p 8090:8080 -p 50000:50000 -v /your/home:/var/jenkins_home jenkins +docker run --name myjenkins -p 8080:8080 -p 50000:50000 -v /your/home:/var/jenkins_home jenkins +``` +## Anatomy of the build +* Clone +* Compile +* Unit Test +* Package + +* Jenkins workspace contains all the checked out project +* Job and build are different +* Jobs are template, and build are instances of the job +* DISABLE AUTO REFRESH/ENABLE AUTO REFRESH +* "General" > "Discard old builds" > "Max # of builds to keep" +* Features + * Post-build actions + * Archive artifacts + * http://localhost:8080/job/Project/12/artifact/artifacts/spring-boot-sample-atmosphere-1.4.0.BUILD-SNAPSHOT.jar + * http://localhost:8080/job/Project/13/artifact/artifacts/spring-boot-sample-atmosphere-1.4.0.BUILD-SNAPSHOT.jar + * Send email notifications + * Build trend + * Build staibility: 1 out of last 5 builds failed. 
80% + * Reload configurations from the disk + * Jenkins > System Log > All Jenkins Log + * Jenkins > Configuration > Environment variables > "You can set $PATH" + * Manage Jenkins > "Global Tool Configuration" > "Add JDK" + * Manage Jenkins > "Global Tool Configuration" > "Add Maven" + * Poll SCM interpretation and help syntax and english interpretation for cron +* Master node and Agent Model +* Running Junit test across multiple machines + +## Jenkins Poll SCM +* "* * * * * *" + * Do you really mean "every minute" when you say "* * * * *"? + * Would last have run at Saturday, 12 January, 2019 10:49:05 AM SGT; would next run at Saturday, 12 January, 2019 11:49:05 AM SGT. +* "* * * * * *" + * Do you really mean "every minute" when you say "* * * * *"? +* H/15 * * * * + * every fifteen minutes (perhaps at :07, :22, :37, :52) + +# Jenkins pipeline job +* Jenkins > create_new_pipeline_job > somebuild_dummy_pipeline > Pipeline Syntax +* Jenkins > pipleline_job > Pipeline Syntax > Generate Pipeline Script +* Open search can be used to search directly from browswer intead visiting Jenkins + * type "configure" in jenkins search +* Find list of template by following to steps + * Navigate to http://localhost:8080/job/pipleline_job/pipeline-syntax/ + * Fire this JS from console + ```javascript + Array.from(document.getElementsBySelector("#main-panel > form > table > tbody > tr:nth-child(4) > td.setting-main > select")[0].childElements()).map(_ => _.innerText).join("\n") + ``` + * stage in pipeline is optional, but quite useful. pipeline configuration can work without stage defined in groovy script. + * stage names could be anything, generally "checkout", "build", "package" and "archive" + * When test-reports are not displayed ensure, archival happens after all the steps, archival error might stop reporting test-reports. + * Jenkins > build pipeline > #12 > Pipeline Steps > "Allocate node" > "Workspace" + * Only way to access workspace in pipleine build + * + + + + +# Docker would run on port 8080 (within its VM), It is acceisble to outside on port 8090. 
+```sh +docker run -p 8090:8080 -p 50000:50000 -v /your/home:/var/jenkins_home jenkins +# Docker SMTP Server +docker run --restart unless-stopped --name mailhog -p 1025:1025 -p 8025:8025 -d mailhog/mailhog +``` + +# Jenkins URLS +* [Configure](http://localhost:8080/configure) +* [Test History](http://localhost:8080/job/buildname/6/testReport/history/) +* [Pipeline Steps](http://localhost:8080/job/petclinic/23/flowGraphTable/) +* [Code Coverage](http://localhost:8080/job/petclinic/23/Code_20Coverage/) + +## List of templates +* archiveArtifacts: Archive the artifacts +* bat: Windows Batch Script +* build: Build a job +* checkout: Check out from version control +* cleanWs: Delete workspace when build is done +* deleteDir: Recursively delete the current directory from the workspace +* dir: Change current directory +* echo: Print Message +* emailext: Extended Email +* emailextrecipients: Extended Email Recipients +* error: Error signal +* fileExists: Verify if file exists in workspace +* fingerprint: Record fingerprints of files to track usage +* git: Git +* input: Wait for interactive input +* isUnix: Checks if running on a Unix-like node +* junit: Archive JUnit-formatted test results +* library: Load a shared library on the fly +* libraryResource: Load a resource file from a shared library +* load: Evaluate a Groovy source file into the Pipeline script +* lock: Lock shared resource +* mail: Mail +* milestone: The milestone step forces all builds to go through in order +* node: Allocate node +* parallel: Execute in parallel +* powershell: PowerShell Script +* properties: Set job properties +* pwd: Determine current directory +* readFile: Read file from workspace +* readTrusted: Read trusted file from SCM +* resolveScm: Resolves an SCM from an SCM Source and a list of candidate target branch * names +* retry: Retry the body up to N times +* script: Run arbitrary Pipeline script +* sh: Shell Script +* sleep: Sleep +* stage: Stage +* stash: Stash some files to be used later in the build +* step: General Build Step +* svn: Subversion +* timeout: Enforce time limit +* timestamps: Timestamps +* tm: Expand a string containing macros +* tool: Use a tool from a predefined Tool Installation +* unstash: Restore files previously stashed +* validateDeclarativePipeline: Validate a file containing a Declarative Pipeline +* waitUntil: Wait for condition +* withAnt: With Ant +* withCredentials: Bind credentials to variables +* withEnv: Set environment variables +* wrap: General Build Wrapper +* writeFile: Write file to workspace +* ws: Allocate workspace +* — Advanced/Deprecated — + * archive: Archive artifacts + * catchError: Catch error and set build result + * dockerFingerprintFrom: Record trace of a Docker image used in FROM + * dockerFingerprintRun: Record trace of a Docker image run in a container + * envVarsForTool: Fetches the environment variables for a given tool in a list of * 'FOO=bar' strings suitable for the withEnv step. 
+ * getContext: Get contextual object from internal APIs + * unarchive: Copy archived artifacts into the workspace + * withContext: Use contextual object from internal APIs within a block + * withDockerContainer: Run build steps inside a Docker container + * withDockerRegistry: Sets up Docker registry endpoint + * withDockerServer: Sets up Docker server endpoint" + +## References +* [Jenkins Handbook] (https://jenkins.io/doc/book/) +* [Jenkins pipeline docs](https://jenkins.io/doc/book/pipeline/) +* [Jenkins Pipeline](https://github.com/jenkinsci/pipeline-plugin/blob/master/COMPATIBILITY.md) +* [Jenkins update site](https://updates.jenkins-ci.org/experimental/update-center.json) +* [Getting started with Jenkins 2](https://www.pluralsight.com/courses/jenkins-2-getting-started) +* [Pluralsight Jenkins course] (https://github.com/g0t4/jenkins2-course-spring-boot) +* https://gist.github.com/g0t4/747cd20e8563aefc3eac444166983142 +* https://github.com/g0t4/jenkins2-course-spring-petclinic +* D:\project\jenkins\jenkins2-course-spring-boot diff --git a/src/main/md/Tools/jenkins/JenkinsAdmin.groovy b/src/main/md/Tools/jenkins/JenkinsAdmin.groovy new file mode 100644 index 00000000..441964c5 --- /dev/null +++ b/src/main/md/Tools/jenkins/JenkinsAdmin.groovy @@ -0,0 +1,49 @@ +import hudson.tools.ToolDescriptor; +import hudson.tools.ToolInstallation; + +Jenkins.instance.pluginManager.plugins.collect{it -> println(it)} + + +void printAllMethods( obj ){ + if( !obj ){ + println( "Object is null\r\n" ); + return; + } + if( !obj.metaClass && obj.getClass() ){ + printAllMethods( obj.getClass() ); + return; + } + def str = "class ${obj.getClass().name} functions:\r\n"; + obj.metaClass.methods.name.unique().each{ + str += it+"();\r\n"; + } + println "${str}"; +} + +TaskListener log; + +for (ToolDescriptor desc : ToolInstallation.all()) { + for (ToolInstallation inst : desc.getInstallations()) { + println ('\tTool Name: ' + inst.getName()); + println ('\t\tTool Home: ' + inst.translateFor(Jenkins.instance,log)); + } +} + + +//printAllMethods(Jenkins.instance) +System.getProperties().collect{it -> println(it)} +//System.getProperties().forEach((k, v) -> System.out.println(k + ":" + v)); + +import hudson.util.RemotingDiagnostics +import jenkins.model.Jenkins + +println System.getenv + +groovy_script = ''' +println System.getenv("PATH") +println "uname -a".execute().text +'''.trim() + +String result +Jenkins.instance.slaves.collect { agent -> result = RemotingDiagnostics.executeGroovy(groovy_script, agent.channel) } +println result \ No newline at end of file diff --git a/src/main/md/Tools/jenkins/Jenkins_Code.md b/src/main/md/Tools/jenkins/Jenkins_Code.md new file mode 100644 index 00000000..a93a5cac --- /dev/null +++ b/src/main/md/Tools/jenkins/Jenkins_Code.md @@ -0,0 +1,5 @@ +# Learnings from Jenkins code-base of Tags/Jenkins-2.162 (Last release of Jan-2019) +```bash +git clone https://github.com/jenkinsci/jenkins.git +git checkout tags/jenkins-2.162 +``` \ No newline at end of file diff --git a/src/main/md/Tools/jenkins/jenkins_pipeline_script.md b/src/main/md/Tools/jenkins/jenkins_pipeline_script.md new file mode 100644 index 00000000..d1977599 --- /dev/null +++ b/src/main/md/Tools/jenkins/jenkins_pipeline_script.md @@ -0,0 +1,214 @@ +```groovy +node { + notify('Started') + try { + stage 'checkout' + git 'https://github.com/mohanmca/jenkins2-course-spring-boot.git' + def project_path = "spring-boot-samples/spring-boot-sample-atmosphere" + + jdk = tool name: 'jdk8' + env.JAVA_HOME = "${jdk}" + + 
dir(project_path) { + stage 'build and package' + withEnv(['MAVEN_HOME=D:\\Apps\\apache-maven-3.6.0']) { + bat '%MAVEN_HOME%\\bin\\mvn clean package' + } + stage 'archive' + archiveArtifacts "target/*.[jw]ar" + } + + notify('Build Completed!') + } catch(err) { + notify("Build Failed! - ${err}") + currentBuild.result = "Failure!" + } +} +def notify(status){ + emailext ( + to: "you@gmail.com", + subject: "${status}: Job '${env.JOB_NAME} [${env.BUILD_NUMBER}]'", + body: """

${status}: Job '${env.JOB_NAME} [${env.BUILD_NUMBER}]':

+

Check console output at ${env.JOB_NAME} [${env.BUILD_NUMBER}]

""", + ) +} +``` +## Jenkins parallel pipeline +```groovy +stage 'CI' +node { + + git branch: 'jenkins2-course', + url: 'https://github.com/mohanmca/solitaire-systemjs-course.git' + + // pull dependencies from npm + // on windows use: bat 'npm install' + + nodejs('NodeJSv10.15.0') { + bat 'npm install' + } + + + // stash code & dependencies to expedite subsequent testing + // and ensure same code & dependencies are used throughout the pipeline + // stash is a temporary archive + stash name: 'everything', + excludes: 'test-results/**', + includes: '**' + + // test with PhantomJS for "fast" "generic" results + // on windows use: bat 'npm run test-single-run -- --browsers PhantomJS' + nodejs('NodeJSv10.15.0') { + bat 'npm run test-single-run -- --browsers Chrome' + } + + // archive karma test results (karma is configured to export junit xml files) + step([$class: 'JUnitResultArchiver', + testResults: 'test-results/**/test-results.xml']) + +} + +node { + bat 'ls' + bat 'rm -rf *' + unstash 'everything' + bat 'ls' +} + +// parallel integration testingstage 'Browser Testing' + +parallel chrome: { + runTests('Chrome') +}, firefox: { + runTests('Firefox') +}, safari: { + runTests('Safari') +} + +def runTests(browser) { + node { + bat 'rm -rf *' + unstash 'everything' + bat 'npm run test-single-run -- --browsers ${browser}' + step([$class: 'JUnitResultArchiver', testResults: 'test-results/**/test-results.xml']) + } +} + + +def notify(status){ + emailext ( + to: "wesmdemos@gmail.com", + subject: "${status}: Job '${env.JOB_NAME} [${env.BUILD_NUMBER}]'", + body: """

${status}: Job '${env.JOB_NAME} [${env.BUILD_NUMBER}]':

+

Check console output at ${env.JOB_NAME} [${env.BUILD_NUMBER}]

""", + ) +} +``` + + +```groovy +//without stage +node { + git 'https://github.com/g0t4/jenkins2-course-spring-boot.git' + def project_path = "spring-boot-samples/spring-boot-sample-atmosphere" + + jdk = tool name: 'jdk8' + env.JAVA_HOME = "${jdk}" + + dir(project_path) { + withEnv(['MAVEN_HOME=D:\\Apps\\apache-maven-3.6.0']) { + bat '%MAVEN_HOME%\\bin\\mvn clean package' + } + archiveArtifacts "target/*.jar" + } +} +``` + +```groovy +node { + stage 'checkout' + git 'https://github.com/g0t4/jenkins2-course-spring-boot.git' + def project_path = "spring-boot-samples/spring-boot-sample-atmosphere" + + jdk = tool name: 'jdk8' + env.JAVA_HOME = "${jdk}" + + dir(project_path) { + stage 'build and package' + withEnv(['MAVEN_HOME=D:\\Apps\\apache-maven-3.6.0']) { + bat '%MAVEN_HOME%\\bin\\mvn clean package' + } + stage 'archive' + archiveArtifacts "target/*.jar" + } +} +def notify(status){ + emailext ( + to: "you@gmail.com", + subject: "${status}: Job '${env.JOB_NAME} [${env.BUILD_NUMBER}]'", + body: """

${status}: Job '${env.JOB_NAME} [${env.BUILD_NUMBER}]':

+

Check console output at ${env.JOB_NAME} [${env.BUILD_NUMBER}]

""", + ) +} +``` + + +```groovy +#!groovy +@Library('github.com/walkmod/jenkins-pipeline-shared@maven') _ +pipeline { + agent any + stages { + stage ('Fixing Release'){ + steps { + walkmodApply( + validatePatch: false, + branch: env.BRANCH_NAME, + alwaysApply: true, + alwaysFail: true) + } + } + stage('Build') { + steps { + sh "mvn package" + } + } + stage('Results') { + steps { + archive 'target/*.jar' + } + } + } +} +``` + +```groovy +node { + def mvnHome + stage('Preparation') { // for display purposes + // Get some code from a GitHub repository + git 'https://github.com/jglick/simple-maven-project-with-tests.git' + // Get the Maven tool. + // ** NOTE: This 'M3' Maven tool must be configured + // ** in the global configuration. + mvnHome = tool 'M3' + } + stage('Build') { + // Run the maven build + if (isUnix()) { + sh "'${mvnHome}/bin/mvn' -Dmaven.test.failure.ignore clean package" + } else { + bat(/"${mvnHome}\bin\mvn" -Dmaven.test.failure.ignore clean package/) + } + } + stage('Results') { + junit '**/target/surefire-reports/TEST-*.xml' + archive 'target/*.jar' + } +} +``` + +# References +* [Pipline Gist Search](https://gist.github.com/search?q=filename:Jenkinsfile) +* [Pipeline email](https://gist.github.com/g0t4/747cd20e8563aefc3eac444166983142) +* [Pipeline globals](http://localhost:8080/job/pipleline_job/pipeline-syntax/globals) \ No newline at end of file diff --git a/src/main/md/Tools/jenkins/jenkins_reflection_pipleline.md b/src/main/md/Tools/jenkins/jenkins_reflection_pipleline.md new file mode 100644 index 00000000..e46d4842 --- /dev/null +++ b/src/main/md/Tools/jenkins/jenkins_reflection_pipleline.md @@ -0,0 +1,59 @@ +import hudson.model.* +import hudson.EnvVars +import groovy.json.JsonSlurperClassic +import groovy.json.JsonBuilder +import groovy.json.JsonOutput +import java.net.URL +import hudson.tools.ToolDescriptor; +import hudson.tools.ToolInstallation; +import hudson.util.RemotingDiagnostics +import jenkins.model.Jenkins + +node { + sh "set" + stage('let us try ') { + + Jenkins.instance.pluginManager.plugins.collect{plugins -> println(plugins)} + TaskListener log; + + for (ToolDescriptor desc : ToolInstallation.all()) { + for (ToolInstallation inst : desc.getInstallations()) { + println ('\tTool Name: ' + inst.getName()); + println ('\t\tTool Home: ' + inst.translateFor(Jenkins.instance,log)); + } + } + + + //printAllMethods(Jenkins.instance) + System.getProperties().collect{props -> println(props)} + + + println System.getenv + + groovy_script = ''' + println System.getenv("PATH") + println "uname -a".execute().text + '''.trim() + + String result + Jenkins.instance.slaves.collect { agent -> result = RemotingDiagnostics.executeGroovy(groovy_script, agent.channel) } + println result + } +} + + def printAllMethods( obj ) + { + if( !obj ){ + println( "Object is null\r\n" ); + return; + } + if( !obj.metaClass && obj.getClass() ){ + printAllMethods( obj.getClass() ); + return; + } + def str = "class ${obj.getClass().name} functions:\r\n"; + obj.metaClass.methods.name.unique().each{ + str += it+"();\r\n"; + } + println "${str}"; +} \ No newline at end of file diff --git a/src/main/md/Tools/jenkins/jenkins_urls.md b/src/main/md/Tools/jenkins/jenkins_urls.md new file mode 100644 index 00000000..fb3c0675 --- /dev/null +++ b/src/main/md/Tools/jenkins/jenkins_urls.md @@ -0,0 +1,3 @@ +* [Environment Variables](http://localhost:8080/env-vars.html/) +* [Restart](http://jenkins_url:8080/safeRestart|restart) +* [Each Jenkins Page has API link at the bottom of the 
page](http://localhost:8080/env-vars.html/) \ No newline at end of file diff --git a/src/main/md/Tools/jupyter.md b/src/main/md/Tools/jupyter.md new file mode 100644 index 00000000..b67a5361 --- /dev/null +++ b/src/main/md/Tools/jupyter.md @@ -0,0 +1,25 @@ +## Jupyter docker images +* https://jupyter-docker-stacks.readthedocs.io/en/latest/using/selecting.html + * jupyter/scipy-notebook + * jupyter/tensorflow-notebook + * jupyter/datascience-notebook + * jupyter/pyspark-notebook + * jupyter/all-spark-notebook + * docker run -p 8888:8888 jupyter/scipy-notebook:17aba6048f44 + + +```bash +sudo sh +sudo apt-get install python3 +pip install scikit-learn +apt install python-pip +apt install python3-pip python3-dev +sudo -H pip3 install --upgrade pip +sudo -H pip3 install virtualenv +mkdir ~/ml +cd ~/ml/ +virtualenv ml +source ml/bin/activate +pip install jupyter +jupyter notebook --allow-root --ip=0.0.0.0 --port=80 +``` diff --git a/src/main/md/Tools/k8s/CKAD.preparation.md b/src/main/md/Tools/k8s/CKAD.preparation.md new file mode 100644 index 00000000..c0506d5f --- /dev/null +++ b/src/main/md/Tools/k8s/CKAD.preparation.md @@ -0,0 +1,23 @@ +# Scope to cover +* [List of topics tested](https://github.com/cncf/curriculum/blob/master/CKAD_Curriculum_V1.14.1.pdf) + +* Complete "Kubernetes in Action" - book +* Udemey - CKAD course +* Kubectl and -dry-run should be familiar +* Be well versed with vi editor and tmux +* Katakode kubernetes courses + + +# Practice for exam +* Repeat mock exam from udemey course +* Setup minikube based cluster locally +* Complete Tom Armstrong CKAD prep notes - https://github.com/twajr/ckad-prep-notes +* Practice this like exam + * https://github.com/dgkanatsios/CKAD-exercises + * Mathew palmer https://matthewpalmer.net/kubernetes-app-developer/ + + +# Reference +* (https://www.reddit.com/r/kubernetes/comments/a6ecnq/passed_the_ckad_as_thanks_to_the_community_heres/) +* (https://medium.com/@nassim.kebbani/how-to-beat-kubernetes-ckad-certification-c84bff8d61b1) +* (https://medium.com/@ContinoHQ/the-ultimate-guide-to-passing-the-cka-exam-1ee8c0fd44cd) \ No newline at end of file diff --git a/src/main/md/Tools/k8s/CNDWithK8s.md b/src/main/md/Tools/k8s/CNDWithK8s.md new file mode 100644 index 00000000..0514b559 --- /dev/null +++ b/src/main/md/Tools/k8s/CNDWithK8s.md @@ -0,0 +1,28 @@ + * Error encountered while starting minicube + ```pre + X Unable to start VM: start: Unable to start the VM: D:\Apps\Oracle\VirtualBox\VBoxManage.exe startvm minikube --type headless failed: +VBoxManage.exe: error: Call to WHvSetupPartition failed: ERROR_SUCCESS (Last=0xc000000d/87) (VERR_NEM_VM_CREATE_FAILED) +VBoxManage.exe: error: Details: code E_FAIL (0x80004005), component ConsoleWrap, interface IConsole + ``` + * Above error was caused by HyperV usage. 
To disable HyperV + bcdedit /set hypervisorlaunchtype off + https://github.com/kubernetes/minikube/issues/4587 + + + * minikube / kubectl commands + ```bash + minikube version + minikube start + minikube stop + kubectl get nodes + kubectl get pods + minikube dashboard + ``` + + * Docker hello world + ```bash + docker container run -p 9999:8888 --name hello cloudnatived/demo:hello + ``` + + ## Reference + * https://github.com/cloudnativedevops/demo.git \ No newline at end of file diff --git a/src/main/md/Tools/k8s/Kubernetes.md b/src/main/md/Tools/k8s/Kubernetes.md new file mode 100644 index 00000000..a711d362 --- /dev/null +++ b/src/main/md/Tools/k8s/Kubernetes.md @@ -0,0 +1,58 @@ +* With container images, we confine the application code, its runtime, and all of its dependencies in a pre-defined format. And, with container runtimes like runC, containerd, or rkt we can use those pre-packaged images, to create one or more containers. +* We would like to have a fault-tolerant and scalable solution, which can be achieved by creating a single controller/management unit - referred as "container orchestrator" +* Container orchestrators are the tools which group hosts together to form a cluster, and help us fulfill the following requirements + * fault-tolerant + * on-demand scaling + * discover and communicate with other applications + * Can update/rollback without any downtime +* Container orchestrators + * Docker Swarm + * Kubernetes + * Mesos Marathon + * Amazon ECS + * Hashicorp Nomad +* Container orchestrators can: + * Bring multiple hosts together and make them part of a cluster + * Schedule containers to run on different hosts + * Help containers running on one host reach out to containers running on other hosts in the cluster + * Bind containers and storage + * Bind containers of similar type to a higher-level construct, like services, so we don't have to deal with individual containers + * Keep resource usage in-check, and optimize it when necessary + * Allow secure access to applications running inside containers. + +* Kubernetes based on Borg, which is a cluster manager created by Google + * "Google's Borg system is a cluster manager that runs hundreds of thousands of jobs from many thousands of different applications, across a number of clusters each with up to tens of thousands of machines. + * Kubernetes offers a very rich set of features for container orchestration. + * Automatic binpacking + * Self-healing + * Horizontal scaling + * Service discovery and Load balancing + * Automated rollouts and rollbacks + * Secrets and configuration management + * Storage orchestration + * Batch execution + * Kubernetes + * Kubernetes is an open-source system for automating deployment, scaling, and management of containerized applications. + * Manager for shipping containers + * It is an open source project written in the Go language + * Kubernetes has new releases every three months. The current stable version is 1.11 (as of September 2018). + +* Kubernetes Architecture + * K8S Architecture: ![K8S Architecture][arch] + * One or more master nodes, only one of them will be the leader + * One or more worker nodes + * Distributed key-value store, like etcd. 
+ +* Master node + * API server + * Scheduler + * Controller manager + * etcd (Key value store) + + + # Image references + [arch]: img/Kubernetes_Architecture1.png "Kubernetes Architecture" + + # Reference + * (Case Studies)[https://kubernetes.io/case-studies/] + \ No newline at end of file diff --git a/src/main/md/Tools/k8s/Minikube.md b/src/main/md/Tools/k8s/Minikube.md new file mode 100644 index 00000000..8c8e6d2d --- /dev/null +++ b/src/main/md/Tools/k8s/Minikube.md @@ -0,0 +1,17 @@ +```cmd +install choclatey - @"%SystemRoot%\System32\WindowsPowerShell\v1.0\powershell.exe" -NoProfile -InputFormat None -ExecutionPolicy Bypass -Command "iex ((New-Object System.Net.WebClient).DownloadString('https://chocolatey.org/install.ps1'))" && SET "PATH=%PATH%;%ALLUSERSPROFILE%\chocolatey\bin" +install virtualbox +@ echo off choco install minikube kubernetes-cli +kubectl version +minikube addons enable metrics-server +minikube start +minikube start --vm-driver=hyperv --kubernetes-version="v1.14" +minikube delete +minikube dashboard +kubectl version --client +kubectl config current-context +kubectl get nodes +``` + +* https://instruqt.com/public/tracks/deploying-an-app-on-kubernetes/ +* https://www.katacoda.com/courses/kubernetes \ No newline at end of file diff --git a/src/main/md/Tools/k8s/get_started.md b/src/main/md/Tools/k8s/get_started.md new file mode 100644 index 00000000..18fc7108 --- /dev/null +++ b/src/main/md/Tools/k8s/get_started.md @@ -0,0 +1,49 @@ +```bash + minikube stop + minikube delete + minikube version + minikube start --v=0 > kubernetes.log + minikube start -v=10 > kubernetes.log + #minikube version: v1.2.0 + minikube dashboard +``` + +``` + * minikube v1.2.0 on windows (amd64) + * Creating virtualbox VM (CPUs=2, Memory=2048MB, Disk=20000MB) ... + * Found network options: + - NO_PROXY=192.168.99.100 + * Configuring environment for Kubernetes v1.15.0 on Docker 18.09.6 + - env NO_PROXY=192.168.99.100 + * Pulling images ... + * Launching Kubernetes ... + * Verifying: apiserver proxy etcd scheduler controller dns + * Done! kubectl is now configured to use "minikube" +``` + + +* After started +``` +kubectl get pod --all-namespaces +kubectl get deployment --all-namespaces +#install docker-toolbox +Use "Docker Quickstart Terminal" +docker run hello-world +git clone https://github.com/cloudnativedevops/demo.git +cd demo/hello +docker image build -t mhello . +docker run -p 9999:8888 mhello:latest +# Find IP address of the docker host machine +docker-machine ip default +http://192.168.99.100:9999/ + +kubectl run demo --image=YOUR_DOCKER_ID/myhello --port=9999 --labels app=demo +kubectl run demo --image=mhello:latest --port=9999 --labels app=demo + +minikube start -p onecluster +kubectl run demo --image=cloudnatived/demo:hello --port=9999 --labels app=demo +#kubectl run --generator=deployment/apps.v1 is DEPRECATED and will be removed in a future version. Use kubectl run --generator=run-pod/v1 or kubectl create instead. 
+#deployment.apps/demo created +kubectl port-forward deploy/demo 9999:8888 +curl http://localhost:9999/ +``` \ No newline at end of file diff --git a/src/main/md/Tools/k8s/img/Kubernetes_Architecture1.png b/src/main/md/Tools/k8s/img/Kubernetes_Architecture1.png new file mode 100644 index 00000000..1147187d Binary files /dev/null and b/src/main/md/Tools/k8s/img/Kubernetes_Architecture1.png differ diff --git a/src/main/md/Tools/k8s/k8s_author_influential.md b/src/main/md/Tools/k8s/k8s_author_influential.md new file mode 100644 index 00000000..5d09977e --- /dev/null +++ b/src/main/md/Tools/k8s/k8s_author_influential.md @@ -0,0 +1,17 @@ +* Three co-creators of Kubernetes + * Joe Beda + * Craig McLuckie of VMware + * Brendan Burns of Microsoft +* Brendon Burns +* David Oppenheimer +* Kelsey Hightower + +* [OPEN FAAS](https://github.com/openfaas/workshop) +* [Docker Kubernetes Lab Handbook](https://docker-k8s-lab.readthedocs.io/en/latest/) +* [Kubernetes the hard way](https://github.com/kelseyhightower/kubernetes-the-hard-way) +* [Design patterns for container-based distributed systems](https://www.usenix.org/system/files/conference/hotcloud16/hotcloud16_burns.pdf) +* [designing-distributed-systems-labs](https://github.com/brendandburns/designing-distributed-systems-labs) + +## Community + +* K8s General discussion (https://discuss.kubernetes.io/c/general-discussions?order=views) diff --git a/src/main/md/Tools/k8s/pod_deployment_definition.yaml b/src/main/md/Tools/k8s/pod_deployment_definition.yaml new file mode 100644 index 00000000..1046c135 --- /dev/null +++ b/src/main/md/Tools/k8s/pod_deployment_definition.yaml @@ -0,0 +1,23 @@ +# pod-deployment-definition.yaml +apiVersion: apps/v1 +kind: Deployment +metadata: + name: nginx-deployment + namespace: default + labels: + app: nginx +spec: + replicas: 3 + selector: + matchLabels: + app: nginx + template: + metadata: + labels: + app: nginx + spec: + containers: + - name: nginx + image: nginx:1.7.9 + ports: + - containerPort: 80 \ No newline at end of file diff --git a/src/main/md/Tools/logstash_kibana.md b/src/main/md/Tools/logstash_kibana.md new file mode 100644 index 00000000..f6a629d0 --- /dev/null +++ b/src/main/md/Tools/logstash_kibana.md @@ -0,0 +1,106 @@ + +## Logstash functions +* Ingest - Collect input from multiple sources +* Enhance/Modify - Enrich the input +* Forward - to any storage +* Configuration + * Input + * File + * Application log + * Beat input + * Filter + * How to parse data, Can perform lookup + * Ignore some + * Modify Any + * Output + * Where should we store the logs + * Back end? / Elasticserch? 
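A minimal sketch of the input/filter/output structure described above, assuming a local Apache access log and an Elasticsearch node on localhost:9200 (the file path, index name and host are placeholders):

```bash
# write a throwaway pipeline config and run it once; every path/host below is illustrative
cat > /tmp/apache_pipeline.conf <<'EOF'
input  { file { path => "/var/log/apache2/access.log" start_position => "beginning" } }
filter {
  grok  { match => { "message" => "%{COMBINEDAPACHELOG}" } }
  geoip { source => "clientip" }
}
output { elasticsearch { hosts => ["localhost:9200"] index => "apache-access" } }
EOF
/usr/share/logstash/bin/logstash -f /tmp/apache_pipeline.conf
```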
+ +## Logstah plugins +* Apache logs +* log4j files +* Windows Event log +* Can do filter, and look-up geo-location +* Dozens of filters + +# Filters +* Grok filter + * Can parse unstructured data and convert into strutured data + * Web server logs into json storage +* Geoip + * Internally uses Grok filter + * Can covert IP address into rich set of information using lookup + +## Kibana +* General graphing and visualization tool +* Originally written in NodeJs +* installation + ```bash + wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | sudo apt-key add - + sudo apt-get update && apt-get install kibana + server.host: point_to_local_ip + server.name: "globo-kibana01" + elasticserach.url: "http://192.168.0.1:9200/" + ``` +* /etc/kibana/kibana.yml +* http://192.168.1:5601/app/kibana + +## Kibana Scripted field +``` +Integer.parseInt(doc['fileName'].value) +``` + +## Elasticsearch Installation +```bash +sudo apt-get install openjdk-8-jre-headless +mkdir pkg; cd pkg +wget http://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-5.0.0.deb +sudo dpkg -i elasticsearch-5.0.0.deb +/etc/elaticsearch/elasticsearch.yml +sudo apt-get install elasticsearch +sudo /etc/init.d/elasticsearch start +``` + +```bash +Change clustername and memory maps settings in kernel +cluster.name: globo-monitoring +network.host: bind_to_local_ip_address/0.0.0.0 +sysctl -w vm.max_map_count=262144 +service start elasticsearch +curl http://whateverIpd:9200/ +systemctl enable elasticsearch #--ensure system start would automatically start elasticsearch +``` + +## Windows service registration +```bat +* elasticsearch/config/jvm.options file is on windows, where we can modify stack size of elasticsearch -Xss1m +.\elasticsearch-service.bat install ElasticSearch +start-service ElasticSearch +Invoke-WebRequest http://localhost:9200/ +``` + +## logstash Installation +```bash +sudo apt-get install openjdk-8-jre-headless +wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | sudo apt-key add - +echo "deb https://artifacts.elastic.co/packages/5.x/apt stable main | tee -a /etc/apt/sources.list.d/elastic-5.x.list" +apt-get update && apt-get install logstash +cd /usr/share/logstsh; ll; +/usr/share/logstash/bin/logstash -e 'input { stdin { } } output { elasticsearch { hosts => ["192.168.0.1:9200"] } }' +# type some sample message for above command and ensure it is in elasticsearch +wget http://192.168.0.1:9200/logstash-*/search +logstash -f /home/nikias/logstash/cars_csv_logstash.config +``` + + +# Additional settings +* elasticsearch/config/jvm.options file is on windows, where we can modify stack size of elasticsearch +* cat /etc/issue.net -- shows the distribution of ubuntu +* systemctl can register service to ubuntu +* systemctl enable logstash +* systemctl logstash start + + +# References +* [Lecture 16 logstash job Kibana visualization](https://www.youtube.com/watch?v=imrKm6dV3NQ&t=532s) +* [Logstash] (https://github.com/mohanmca/MohanLearningGround/blob/master/src/main/resources/conf/logstash/apache_logstash.conf) \ No newline at end of file diff --git a/src/main/md/Tools/matching_engines.md b/src/main/md/Tools/matching_engines.md new file mode 100644 index 00000000..efb18618 --- /dev/null +++ b/src/main/md/Tools/matching_engines.md @@ -0,0 +1,52 @@ +* https://github.com/larsga/Duke/wiki/GettingStarted +* java no.priv.garshol.duke.Duke --verbose --progress --testdebug --showmatches C:\Users\mohan\git\Duke\duke-core\src\main\resources\duplicate.conf.xml +* If there is no group in configuration, it would 
be considered as running for deduplication +* Algorithm for comparing two records + * Find list of properties needs to be compared + * For each propery assign two probablity, if they don't match how much *low* probablity (0.2), if they match how much *high* probablity (0.8) + * For each property in records should be canonicalized/lemmatize/stemmed + * Compute similarity property using Levenshtein distance + * property_prob = (sim >= 0.5) ? ((high - low) * (sim * sim)) + low : low; + * Above would punish probablity if simularity is low using square technnique + * record_prob = Utils.computeBayes(record_prob, property_prob); //Combine the record matching problity using Naive bayes + * Refer duke-core\src\main\java\no\priv\garshol\duke\PropertyImpl.java and duke-core\src\main\java\no\priv\garshol\duke\Processor.java +* Alternative algorithm using cosine similarity, Bag-of-words_model + * Treat each word as a vector dimension. + * Each document is a vector + * The values on each dimension are the word coun + * Cosine similarity is straightforward from this representation + * + + +# References +* [Bayesian identity resolution](http://www.garshol.priv.no/blog/217.html) +* [Record_linkage#Identity_resolution](https://en.wikipedia.org/wiki/Record_linkage#Identity_resolution) +* [Mike Mull: The Art and Science of Data Matching](https://www.youtube.com/watch?v=Y-nYEOgq3YE) +* https://docs.dedupe.io/en/latest/How-it-works.html +* [Naive Bayes spam filtering](https://en.wikipedia.org/wiki/Naive_Bayes_spam_filtering) +* [Bag-of-words_model](https://en.wikipedia.org/wiki/Bag-of-words_model) +* [Email Filtering](https://en.wikipedia.org/wiki/Email_filtering) +* [Peter Christen: Session 1 - Record Linkage Workshop at the ADRC-Scotland, 13 July 2015](https://www.youtube.com/watch?v=DyGonV7A_EY) + * https://www.youtube.com/watch?v=DyGonV7A_EY&list=PL1gmpH4hgt0bSIOTWCOujVLRjhtwRIG7R +* [Text similarity](https://commons.apache.org/proper/commons-text/apidocs/org/apache/commons/text/similarity/package-summary.html) +* https://github.com/eklem/stopword-trainer +* https://github.com/eklem/stopword + +# Configurations + --progress show progress report while running + --showmatches show matches while running + --linkfile= output matches to link file + --interactive query user before outputting link file matches + --testfile= test matches against known correct results in file + --testdebug display failures + --verbose display diagnostics + --noreindex reuse existing Lucene index + --batchsize=n set size of Lucene indexing batches + --showdata show all cleaned data (data debug mode) + + --profile display performance statistics + --threads=N run processing in N parallell threads + --pretty pretty display when comparing records + --singlematch (in record linkage mode) only accept + the best match for each record + --lookups display lookup properties \ No newline at end of file diff --git a/src/main/md/Tools/maven.md b/src/main/md/Tools/maven.md new file mode 100644 index 00000000..84777ddd --- /dev/null +++ b/src/main/md/Tools/maven.md @@ -0,0 +1,47 @@ +## Often used maven command + +```bash +mvn clean package -Dmaven.test.skip=true +mvn clean package -Dmaven.test.skip=true -Dmaven.test.failure.ignore=true -Dmaven.test.haltafterfailure=no +mvn dependency:tree -Dverbose -Dincludes=commons-collections +mvn assembly:single +mvn -Dmaven.surefire.debug test –default remote debug port 5005 +mvn -Dmaven.surefire.debug=”-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=8000 -Xnoagent -Djava.compiler=NONE” test 
+mvn exec:java -Dexec.mainClass="com.nikias.App" +``` + +## mvnDebug is handy. +* set MAVEN_DEBUG_OPTS="-Xdebug -Xnoagent -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=8000" + +## Configuring resources directory +```xml + + + + [your folder here] + + + + resource3 + + +``` + +## Configuring test-scrope and program +```xml + + java + + -classpath + --module-path + + com.example.Main + + test + +``` + +## References +* http://maven.apache.org/maven-1.x/plugins/test/properties.html +* [Cocurrent JUnit test runner for Parameters](http://stackoverflow.com/questions/10141648/concurrent-junit-tests-with-parameters) +* [JUnit parallel](http://java.dzone.com/articles/running-junit-tests-parallel) \ No newline at end of file diff --git a/src/main/md/Tools/mermaid.md b/src/main/md/Tools/mermaid.md new file mode 100644 index 00000000..9bd49d44 --- /dev/null +++ b/src/main/md/Tools/mermaid.md @@ -0,0 +1,21 @@ +* Generate SVG diagram from text +* Generates Sequence diagram and flowchart +* Graph can be layed out using + * TD - Top down + * LR - Left to right +* Features + * You can change the shape of the nodes + * Label the edges + * + +```mermaid +graph TD; + Thaslim --Friend of --> Kavin; + Thaslim --Brother of --> Thasthik; + Thasthik --Friend of --> Kavin; + Kavin --Son of --> Mohan; + Joe --Wife of --> Mohan; +``` + +* https://mermaidjs.github.io/sequenceDiagram.html +* https://mermaidjs.github.io/gantt.html \ No newline at end of file diff --git a/src/main/md/Tools/most_useful_es_query.md b/src/main/md/Tools/most_useful_es_query.md new file mode 100644 index 00000000..ea6de9c3 --- /dev/null +++ b/src/main/md/Tools/most_useful_es_query.md @@ -0,0 +1,283 @@ +# Most often used adming URL +* http://localhost:9200/_stats +* + +# Most often used URL +* *ElasticSearch Get can accept body* +* /_search +* /_cluster/health +* /index/_count +* /_cat/indices +* /gb/_mapping/tweet - mapping for type tweet that is under index -gb +* GET /gb/tweet/_validate/query query +* GET /gb/tweet/_validate/query?explain +* GET /_search?explain or /_search?explain?format=yaml +* GET /_search?routing=user_1,user2 +* GET /_search?search_type=count +* GET /old_index/_search?search_type=scan&scroll=1m +* DELETE /index_one,index_two or DELETE /index_* or DELETE /_all +* PUT /my_index_v1/_alias/my_index and/or GET /*/_alias/my_index +* +* To count the number of documents in the cluster + * ```{"query": { "match_all": {} }}``` +* To count the number of documents in the cluster + * ```{ "query" : { "match" : { "last_name" : "Smith" } }}``` +* To find all employees with a last name of Smith, but we want only employees who are older than 30 + * ```{ "query" : { "filtered" : { "filter" : { "range" : { "age" : { "gt" : 30 } 1 } }, "query" : { "match" : { "last_name" : "smith" 2 } } } }}``` +* To Full-Text Search - search for all employees who enjoy rock climbing - relevance + * ```{ "query" : { "match" : { "about" : "rock climbing" }}}`` +* To Full-Text Search - search for all employees who enjoy rock climbing - mandatory + * ```{ "query" : {"match_phrase" : {"about" : "rock climbing"}}}``` +* Highlight - produces html around matched words +```{ "query" : { "match_phrase" : { "about" : "rock climbing" } }, "highlight": { "fields" : { "about" : {} } } }``` +* Sample aggregation query +```{ "aggs": { "all_interests": { "terms": { "field": "interests" } } } }``` +* let�s find the average age of employees who share a particular interest: +```{"aggs" : { "all_interests" : { "terms" : { "field" : "interests" }, "aggs" : { 
"avg_age" : { "avg" : { "field" : "age" } } } } }}``` +* To update mappings + * ```PUT /gb `{"mappings":{"tweet":{"properties":{"tweet":{"type":"string","analyzer":"english"},"date":{"type":"date"},"name":{"type":"string"},"user_id":{"type":"long"}}}}}``` +* To highlight data + * ```{"query":{"match_phrase":{"about":"rock climbing"}},"highlight":{"fields":{"about":{}}}}``` + +* Create an index with 3 shards + * PUT /blogs - ```{ "settings" : { "number_of_shards" : 3, "number_of_replicas" : 1 }}``` +* ```{"bool":{"must":{"match":{"title":"how to make millions"}},"must_not":{"match":{"tag":"spam"}},"should":[{"match":{"tag":"starred"}},{"range":{"date":{"gte":"2014-01-01"}}}]}}``` + * The above query finds documents whose title field matches the query string how to make millions and that are not marked as spam. If any documents are starred or are from 2014 onward, + +* To update number of replicas + * PUT /blogs/_settings - ```{ "number_of_replicas" : 2}``` +* PUT /{index}/{type}/{id} +* To create new document and fail if existing one already available - PUT /website/blog/123/_create or PUT /website/blog/123?op_type=create +* PUT /website/blog/1?version=1 [We want this update to succeed only if the current _version of this document in our index is version 1.][To update first version] +* Retrieve only selected fields - GET /website/blog/123?_source=title,text +* To check if document exists without retrieving content - curl -i -XHEAD http://localhost:9200/website/blog/123 (HTTP-200 or HTTP-404) +* DELETE /website/blog/123 (HTTP-200 or HTTP-404) +* PUT /website/blog/2?version=5&version_type=external + +# Search URL +* /_search - Search all types in all indices +* /gb/_search - Search all types in the gb index +* /gb,us/_search - Search all types in the gb and us indices +* /g*,u*/_search - Search all types in any indices beginning with g or beginning with u +* /gb/user/_search - Search type user in the gb index +* /gb,us/user,tweet/_search - Search types user and tweet in the gb and us indices +* /_all/user,tweet/_search - Search types user and tweet in all indices +* GET /_search?size=5 +* GET /_search?size=5&from=10 +* ?format=yaml +* ?preference=prefrences, _primary, _primary_first, _local, _only_node:xyz, _prefer_node:xyz +* _id=hash_uuid and _uid = concatenare(_type, _id) = type#id +* + +# Query DSL +* Query DSL = Query DSL + Filter DSL. +* ```{"query":{"match_all":{}}}``` +* ```{"QUERY_NAME":{"ARGUMENT":"VALUE","ARGUMENT2":"VALUE"}}``` +* ```{"QUERY_NAME": { "FIELD_NAME" : {"ARGUMENT":"VALUE","ARGUMENT2":"VALUE"}}}``` +* Leaf clauses (like the match clause) that are used to compare a field (or fields) to a query string. +* Compound clauses that are used to combine other query clauses. For instance, a bool clause allows you to combine other clauses that either must match, must_not match, or should match if possible: + * Compound query + ```{"bool":{"must":{"match":{"email":"business opportunity"}},"should":[{"match":{"starred":true}},{"bool":{"must":{"folder":"inbox"},"must_not":{"spam":true}}}],"minimum_should_match":1}}``` +* A filter asks a yes|no question of every document and is used for fields that contain exact values: +* A query is similar to a filter, but also asks the question: How well does this document match? - how relevant each document is to the query +* Queries have to not only find matching documents, but also calculate how relevant each document is, which typically makes queries heavier than filters. Also, query results are not cachable. 
+* Filter results are cacheable +* The goal of filters is to reduce the number of documents that have to be examined by the query. +* As a general rule, use query clauses for full-text search or for any condition that should affect the relevance score, and use filter clauses for everything else. +* Always analyze production queries + * GET /gb/tweet/_validate/query query + * GET /gb/tweet/_validate/query?explain 1 +* Query debug for a document + ``` + GET /us/tweet/12/_explain + { + "query" : { + "filtered" : { + "filter" : { "term" : { "user_id" : 2 }}, + "query" : { "match" : { "tweet" : "honeymoon" }} + } + } + } +``` + + +# Filter DSL +* Term Filter - The term filter is used to filter by exact values, be they numbers, dates, Booleans, or not_analyzed exact-value string fields: +* Range filter +* For exact-value searches, you probably want to use a filter instead of a query, as a filter will be cached. +* You can use filter alone without query dsl (default query dsl for filter would be assuemed ```{ "query": { "match_all": {}} }``` +* + +# Query DSL +* must - And +* must_not - Not +* should - or +* If there are no must clauses, at least one should clause has to match. However, if there is at least one must clause, no should clauses are required to match. +* The multi_match query allows to run the same match query on multiple fields + * ```{"multi_match":{"query":"full text search","fields":["title","body"]}}``` +* The bool query, like the bool filter, is used to combine multiple query clauses. However, there are some differences. + * While filters give binary yes/no answers, queries calculate a relevance score instead. + * The bool query combines the _score from each must or should clause that matches. + +* Example of fitlered query + * ```{"filtered":{"query":{"match":{"email":"business opportunity"}},"filter":{"term":{"folder":"inbox"}}}}``` + * ```{"sort": { "date": { "order": "desc" }}}``` + +# Search tips +* +means that the word must be present. +* Default sort order is _score descending. +* _score can be quite expensive, and usually its only purpose is for sorting, can be forced = track_scores parameter to true. +* Multiple sorts - ```{"sort":[{"date":{"order":"desc"}},{"_score":{"order":"desc"}}]}``` + +# Analyzer +* GET /_analyze?analyzer=standard + +# Query String Query +* GET /_search?size=5&from=10 +* Name has john and tweet has mary ---> +name:john +tweet:mary ---> GET /_search?q=%2Bname%3Ajohn+%2Btweet%3Amary +* mary in any field (_all field) ---> +name:john +tweet:mary ---> GET /_search?q=mary +* The _all field contains either of the words aggregations or geo ---> +name:(mary john) +date:>2014-09-10 ---> (aggregations geo) ---> ?q=%2Bname%3A(mary+john)+%2Bdate%3A%3E2014-09-10+%2B(aggregations+geo) +* GET /_search?sort=date:desc&sort=_score&q=search + +Search types user and tweet in all indices + +* To update + ```POST /website/blog/1/_update + { + "doc" : { + "tags" : [ "testing" ], + "views": 0 + } + }``` +* Scripting +``` +POST /website/blog/1/_update +{ + "script" : "ctx._source.views+=1" +}``` + +* Below would produce "tags": ["testing", "search"] inside json document +``` +POST /website/blog/1/_update +{ + "script" : "ctx._source.tags+=new_tag", + "params" : { + "new_tag" : "search" + } +} +``` +``` +POST /website/blog/1/_update +{ + "script" : "ctx.op = ctx._source.views == count ? 
'delete' : 'none'", + "params" : { + "count": 1 + } +} + +POST /website/pageviews/1/_update +{ + "script" : "ctx._source.views+=1", + "upsert": { + "views": 1 + } +} +//where the order of increments does not matter, below could be used +POST /website/pageviews/1/_update?retry_on_conflict=5 1 +{ + "script" : "ctx._source.views+=1", + "upsert": { + "views": 0 + } +} +``` + +* Retrieving Multiple Documents +``` +GET /_mget +{ + "docs" : [ + { + "_index" : "website", + "_type" : "blog", + "_id" : 2 + }, + { + "_index" : "website", + "_type" : "pageviews", + "_id" : 1, + "_source": "views" + } + ] +} + + +GET /website/blog/_mget +{ + "ids" : [ "2", "1" ] +} +``` + +* ElasticSearch - Bulk - bulk should have newline even for last line +``` +{ action: { metadata }}\n +{ request body }\n +{ action: { metadata }}\n +{ request body }\n +``` +``` +POST /_bulk +{ "delete": { "_index": "website", "_type": "blog", "_id": "123" }} +{ "create": { "_index": "website", "_type": "blog", "_id": "123" }} +{ "title": "My first blog post" } +{ "index": { "_index": "website", "_type": "blog" }} +{ "title": "My second blog post" } +{ "update": { "_index": "website", "_type": "blog", "_id": "123", "_retry_on_conflict" : 3} } +{ "doc" : {"title" : "My updated blog post"} } + +``` +* Multi value use mode (min, max, avg) for sorting +``` +{"sort":{"dates":{"order":"asc","mode":"min"}}} +``` +``` +PUT /my_temp_index/_settings +{ + "number_of_replicas": 1 +} +PUT /my_temp_index +{ + "settings": { + "number_of_shards" : 1, + "number_of_replicas" : 0 + } +} +``` + +``` +PUT /my_index +{ + "mappings": { + "my_type": { + "_id": { + "path": "doc_id" 1 + }, + "properties": { + "doc_id": { + "type": "string", + "index": "not_analyzed" + } + } + } + } +} +``` +``` +POST /_aliases +{ + "actions": [ + { "remove": { "index": "my_index_v1", "alias": "my_index" }}, + { "add": { "index": "my_index_v2", "alias": "my_index" }} + ] +} +``` \ No newline at end of file diff --git a/src/main/md/Tools/pandoc.md b/src/main/md/Tools/pandoc.md new file mode 100644 index 00000000..4df93f38 --- /dev/null +++ b/src/main/md/Tools/pandoc.md @@ -0,0 +1,6 @@ +# Installation +* Install pandoc +* [Install Miktex - latex on windows to generate pdfs](https://miktex.org/faq/) + +# Commands +* pandoc -f markdown HabitGroup.md -o HabitGroup.pdf diff --git a/src/main/md/Tools/public_sources.md b/src/main/md/Tools/public_sources.md new file mode 100644 index 00000000..b405b6fc --- /dev/null +++ b/src/main/md/Tools/public_sources.md @@ -0,0 +1,16 @@ +* Red Flagged stocks - https://www.fpi.nsdl.co.in/web/Reports/frmRedFlagList.html +* FPI Monitor - https://www.fpi.nsdl.co.in/web/Default.aspx +* Loan feed - https://cdn.ihs.com/www/pdf/WSOWeb-factsheet.pdf +* Financial Calenar - http://www.financialcalendar.com/Data/Trading-Hours/Overview +* Exchange Codes - https://www.iso20022.org/10383/iso-10383-market-identifier-codes +* Codes for the representation of names of countries and their subdivisions - https://www.iso.org/obp/ui/#iso:code:3166:DZ +* List of language codes - https://www.iso.org/iso-639-language-codes.html +* List of currency codes - https://www.iso.org/iso-4217-currency-codes.html + * https://www.iso.org/obp/ui/#iso:pub:PUB500001:en +* https://en.wikipedia.org/wiki/Financial_data_vendor + +* Credit Ratings - https://ratingagency.morningstar.com/mcr +* Hedge Fund Database - http://corporate.morningstar.com/US/asp/subject.aspx?xmlfile=545.xml +* http://www.morningstar.com/products/indexes +* https://www.gmeiutility.org/search.jsp +* 
http://www.mca.gov.in/LLP/dca/EES_Companies_List/DIRLIST1_00000000_00120000.pdf diff --git a/src/main/md/Tools/rosie_language.md b/src/main/md/Tools/rosie_language.md new file mode 100644 index 00000000..c8019053 --- /dev/null +++ b/src/main/md/Tools/rosie_language.md @@ -0,0 +1,45 @@ +# Rosie Pattern Language replaces regex (RPL) + +* What is problem with regex, how it fits with big-data? +* What is the role of rosie? +* Use cases of rosie + +* Regex + * regular grammar and doesn't support all recursive grammar + * These expressions can be difficult to write, and are notoriously difficult to read and maintain. + * Not modular, Can't compose and Not debuggable + * Not suitable for bigdata + * Not possessive - would backtrack and consider different match (would cause performance delay) + * PCRE regtex functions can match much more than regular languages (Perl Compatible REgex) + +* Grok + * Grok is apparently the plugin most depended upon for extracting information from log records, + * Grok has a library of named regex patterns + * Grok has proven to be flower than Rosie by a factor of around 4 + * Grok patterns are not as expressive as RPL, but more powerful than regex + +* RPL + * Based on PEG, could express recursive structures (like XML and JSON) that regular expressions cannot. + * PEG can run in linear time in the size of the input data, making them a good choice for processing big data. + * Supports contex-free grammar + * Like programming language + * RPL := Comments + Modules + Identifiers + Whitespace + Quoted Literals + Unit Tests + Macros + * Standard Patterns in the standard library + +* RPL + * is greedy - * would try to match as long as match possible (opposite is lazy as few as possible) + * i possesive - possessive if the matching engine will not backtrack to consider a different match + + +# RPL VS REGEX +* In regex or match would use | (pipe) +* In RPL or match would use / (slash) + +# Reference +* [Rosie Language](http://rosie-lang.org/ex/) +* [Rosie Pattern Language for faster data mining](https://www.youtube.com/watch?v=P5v2ZtcY2-k) +* [Rosie Pattern Language](https://developer.ibm.com/code/open/projects/rosie-pattern-language/) +* [Rosie presentation](https://developer.ibm.com/code/wp-content/uploads/sites/118/2017/11/Rosie-Update-Charts.pdf) +* [Rosie Replaces Regex for Data Mining](https://developer.ibm.com/code/wp-content/uploads/sites/118/2017/11/Rosie-Replaces-Regex.pdf) +* [Rossie QA](https://developer.ibm.com/code/2016/12/08/rosie-pattern-language-qa/) +* [Grok patterns](https://github.com/elastic/logstash/blob/v1.4.2/patterns/grok-patterns) \ No newline at end of file diff --git a/src/main/md/Tools/shell.md b/src/main/md/Tools/shell.md new file mode 100644 index 00000000..3e64fe67 --- /dev/null +++ b/src/main/md/Tools/shell.md @@ -0,0 +1,339 @@ +### Sum all the numbers in 1st collumn + +

+ +```bash +gfind -name "*.scala" -exec wc -l {} \; | awk '{print $1}' | awk '{s+=$1} END {print s}' +``` +
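The two awk passes above can be collapsed into one; an equivalent, shorter form (still assuming GNU find is installed as gfind):

```bash
# sum the first column of the wc -l output in a single awk pass
gfind . -name "*.scala" -exec wc -l {} \; | awk '{s+=$1} END {print s}'
```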

+
+ +### Grep log file that within 10 minutes of interval +

```bash
CURRENT_10_MIN=`date +'%Y-%m-%d %H:%M'|cut -c 1-15`
PREV_10_MIN=`eval date -d \"10 min ago\" \"+'%Y-%m-%d %H:%M'\"|cut -c 1-15`
echo "grep \"${CURRENT_10_MIN}\" ${SERVER_LOGDIR}/${SERVER_LOG}|grep -v \"time string: 00000000T00\""
echo "grep \"${PREV_10_MIN}\" ${SERVER_LOGDIR}/${SERVER_LOG}|grep -v \"time string: 00000000T00\""
```
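The two generated grep commands can also be run directly in one pass; a sketch assuming GNU date and the same SERVER_LOGDIR/SERVER_LOG variables:

```bash
# match both the current and the previous 10-minute window in a single grep
grep -e "$(date +'%Y-%m-%d %H:%M' | cut -c 1-15)" \
     -e "$(date -d '10 min ago' +'%Y-%m-%d %H:%M' | cut -c 1-15)" \
     "${SERVER_LOGDIR}/${SERVER_LOG}" | grep -v "time string: 00000000T00"
```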

+
+ +### Grep between two lines +

+ +```bash +awk '/FromText/,/ToText/' logfile.log +``` +
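An equivalent with sed, in case awk is not at hand (FromText/ToText are the same placeholder markers):

```bash
# print everything from the first FromText line to the next ToText line, inclusive
sed -n '/FromText/,/ToText/p' logfile.log
```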

+
+ +### Find block of code in set of files that contains some word +

```bash
for file in $(egrep -l "report" `find . -name "*.ext"`); do awk '/Block {/,/}/' $file; done
```

+
### In sed, wherever ' is needed in the pattern or replacement, use '\'' (3 characters)

+ +```bash +sed -e 's/^/('\''/' -e 's/$/'\'')/' csv.txt > Ac.txt +``` +

+
+ +### Bulk rename file.. remove first 30 characters. +

+ +```bash +ls -1 *.pdf | awk '{print "mv " $1, substr($1,31,30)}' > ren.sh +chmod 777 ren.sh +./ren.sh +``` +
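A pure-bash alternative that skips the generated ren.sh and survives spaces in file names (a sketch; like the awk version it drops the first 30 characters):

```bash
# ${f:30} strips the first 30 characters of each file name
for f in *.pdf; do mv -- "$f" "${f:30}"; done
```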

+
+ +### Concatenate multiple files into one file +

```bash
export files=`gfind . -type f`;
export concat=output.txt
for file in $files; do echo $file >> $concat; cat $file >> $concat; done
```
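A null-delimited variant of the same idea that also copes with spaces or newlines in file names (the output path is a placeholder):

```bash
# write "file name followed by its contents" for every regular file under the current directory
find . -type f -print0 | while IFS= read -r -d '' f; do
  printf '%s\n' "$f"
  cat "$f"
done > output.txt
```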

+
### Find duplicate lines that are more than 5 characters

+ +```bash +gawk "length($0)>5" passport.js | gsort | sed -e 's/^[ \t]*//' | sed -e 's/[ \t]*$//' | gsort | uniq -c | gsort -nr | grep -v "1 " +``` +
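If only the duplicated lines themselves are needed, `sort | uniq -d` answers the same question more directly (whitespace trimming left out for brevity):

```bash
# print each line longer than 5 characters that occurs more than once
gawk 'length($0) > 5' passport.js | gsort | uniq -d
```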

+
+ +### Analyze one particular line +

```bash
egrep --no-filename -r "distribute " * | sed -r -e "s/[\t]+//g" | sed -r -e "s/^[\t ]*//g" | sed -r -e "s/[\t ]*$//g" | sort | uniq | egrep -v -i site > /tmp/distribute.txt
```

+
+ + +### find the class inside multiple jars. +

+ +```bash +for file in `find . -type f -name "*.jar"`; do unzip -l $file; done | egrep -i className +``` +
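The loop above shows the matches but not which jar they came from; a small variant that prints the jar path instead (className is a placeholder):

```bash
# print the path of every jar whose listing mentions className
for f in `find . -type f -name "*.jar"`; do
  unzip -l "$f" | grep -qi className && echo "$f"
done
```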

+
+ +### find the json inside default link. +

```bash
for a in `find . -maxdepth 2 -type l -name "default*"`; do find $a/conf/ -type f -name "*.json"; done;
```

### Merge all the files matching a name


```bash
for file in `find . -name "*.yml"` ; do echo $file; cat $file; done
```

### find command line arguments


```bash
ps -ww -fp 28862
cat /proc/28862/cmdline
```

### Find all the processes that are connecting to remote port 1468


```bash
lsof -i :1468
```

### Recent Files


```bash
find . -mmin -$((60*24)) -name "*[^0-9].log" | grep -v something.log
```

### Old Files


```bash
find . -mmin +$((60*24)) -name "*[^0-9].log" | grep -v something.log
```

### Merge all the files


```bash
find . -iname '*.java' -exec cat {} \; -exec echo \; > concatenated_source.java
```

### File with numbers 1 to 100


```bash
for var in `seq 1 100`; do echo $var; done > seq.txt
```

### recursively egrep only in html files


```bash
egrep -r -l --include=*.html some_text *
```

### SBT command line behind proxy


```bash
set JAVA_OPTS=-Dhttp.proxySet=true -Dhttp.proxyHost=proxy.com -Dhttp.proxyPort=8080 -Dhttp.proxyUser=test -Dhttp.proxyPassword=Password
```

### Execute script that is available in URL


```bash
curl -sSL https://get.docker.io/ubuntu/ | sudo sh
```

### Find and replace set of files


```bash
sed -i 's/textToFind/replacementText/g' `grep -ril 'textToFind' *`
```

### Find all the file extensions recursively from current folder.


+ +```bash +find -type f | awk -F. '{print $NF}' | sort | uniq -c | sort -nr +``` +

+
+ +### recursively unzip files +

+ +```bash +find . -name "*.gz" -exec gunzip '{}' \; +``` +

+
+ +### Bulk rename of gz to xml +

+ +```bash +ls -1 | awk -F. '{print "mv \"" $1".gz\"", "\""$1".xml\""}' +``` +

+
+ +### Sort based on substring of filename +

+ +```bash +find -type f -name \*gz | awk -F"/" '{print substr($4,1,16)}' | sort -nr +``` +

+
+ +### Find the number of files group by directory +

+ +```bash +find . -type d -exec sh -c "fc=\$(find '{}' -type f | wc -l); echo -e \"\$fc\t{}\"" \; | sort -nr +``` +

+
+ +### Find if className available in any of the jar file +

+ +```bash +find /home/user/project/ -name "dependency*.jar" -exec jar tvf '{}' \; | grep -i "classname" +``` +

+
+ +### Count number of lines in all successive directory in certain files alone. +

+ +```bash +for file in `find -name "InputLog.csv"`; do echo $file; grep -v 'NotInterestedLines' $file | grep -E -w -c InteresteLine; done +``` +

+
+ +### Print rows into columns +

```bash
# split each comma-separated row into one field per line (file name is a placeholder)
awk 'BEGIN { FS = "," }; { for (i = 1; i <= NF; i++) print $i }' input.csv
```

+ +### Grep set of string in single file. +

```bash
while read i ; do egrep ",$i," input_text.log; done < setOfStringInMultipleLine.txt &
```

+
+ +### Or Generate egrep using following command +

```bash
cat files.txt | sort -r | tr '\n' '|' | sed s'/.$//' | sed s'/|/_|_/g' | sed s'/^/_/' | sed s'/$/_/' | sed s'/^/egrep "(/' | sed s'/$/)"/'
sdiff -w 175 file1.csv file2.csv | egrep '\||>|<' > diff.txt
```

+
### Find in which svn commit a source line was removed/added inside a code/configuration file

+ +```bash +C:\Mohan\Workspaces\workspace\TRUNK\application-component\src\main\resources>svn log -l 30 -v --diff configuration_server.xml > svn_log_entries.txt +cat svn_log_entries.txt | grep "ChangedLine" +``` +

+
### Find all the epoch seconds (dates) that do not fall on 10:00 AM

+ +```bash +gawk '{print $1, $2, $3, strftime("%c", $6)}' isProblematic.txt | grep -v "10:00:00 AM" +``` +
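To spot-check a single epoch-seconds value from such a file, GNU date can render it directly (the timestamp below is only an example):

```bash
# print one epoch-seconds value as local time
date -d @1547260200
```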

+
### Given a key-value text file, replace all the keys with their values recursively in text files

+ +```bash +while read line;do export oldTime=`echo $line | awk '{print $1}'`; export newTime=`echo $line | awk '{print $2}'`; find ./ -type f -name "*.properties" -exec perl -p -i -e s/$oldTime/$newTime/g {} \; ; done < /var/tmp/Mohan/find_and_replace/diff_time.csv + +echo "[{\"toto\":5},{\"toto2\":5}]" | python -c 'import json,sys;obj=json.load(sys.stdin);print str(obj[0]["toto"]) + "," + str(obj[0]["toto"])' +``` +

+
+ +### Create sbt directories using shell script +

+ +```bash +#!/bin/sh +mkdir -p src/{main,test}/{java,resources,scala} +mkdir lib project target +``` +

+
diff --git a/src/main/md/Tools/speed_read.md b/src/main/md/Tools/speed_read.md new file mode 100644 index 00000000..79adeae1 --- /dev/null +++ b/src/main/md/Tools/speed_read.md @@ -0,0 +1,26 @@ +# How to read 800 words per minutes + +* Use below exercise with pencil +* During initial practice don't worry about comprehension, it is to improve eye speed and reading muscle memory +* Rapidly move pencil and try to read on only one direction for 2 minutes + * Avoid your eye move going back + * Stop using snapshot reading apprach +* Now use exactly 0.5 second for one line, and move fast to next line continiously for next 3 minutes + * Improving the last step, little more challenging + * This exercise should be done for 3 minutes +* Try to use your peripheral vision, and increase speed + * Start from second word and complete last but one word and move to next line but start from second word + * This would also improves periperhal vision and reading speed + * This exercise should be done for 1 minutes + * Here in this exercise, we can use 1 second for each line +* Try to use your peripheral vision, and increase speed but now for 2 words (start with 3rd word, and stop at 3rd last word) + * This exercise should be done for 1 minutes +* Try to use your peripheral vision, and increase speed but now for 3 words (start with 3rd word, and stop at 3rd last word) + * Here in this exercise, we can use 0.5 second for each line + * This exercise should be done for 3 minutes + + + +# References +* [Learn To Speed Read: Read 300% Faster in 15 Minutes](https://www.youtube.com/watch?v=PZqXBhGR_W8) +* [Measure reading speed](https://www.staples.com/sbd/cre/marketing/technology-research-centers/ereaders/speed-reader/index.html) \ No newline at end of file diff --git a/src/main/md/Tools/spring-security.md b/src/main/md/Tools/spring-security.md new file mode 100644 index 00000000..22fe7e5b --- /dev/null +++ b/src/main/md/Tools/spring-security.md @@ -0,0 +1,127 @@ +## Security + +* Castle approach / Multiple layers of defense +* Browser, Url, Classes and methods + +## Lombox +# @Getter +# @Setter +# @AllArgsConstructor +# @RequiredArgsConstructor +# @EqualsAndHashCode +# Delombok +# @Delegate +# @Cleanup +# @Builder + + +## Spring Security +* Heavy use of filter +* [Filter](https://tomcat.apache.org/tomcat-9.0-doc/servletapi/javax/servlet/http/HttpFilter.html) + * init(servletcofig) + * doFilter(req, res) + * destrory() +* [DelegatingFilterSecuity](https://docs.spring.io/spring-framework/docs/current/javadoc-api/org/springframework/web/filter/DelegatingFilterProxy.html) + * It would forward all the request to other spring security filter + * Other security filter would be responsible for authentication/authorization + * registered in web.xml +* DelegatingFilterSecuity delegates to FilterChainProxy +* FilterChainProxy delegates to SecurityFilterChain +* Sample XML +```xml + + springSecurityFilterChain + org.springframework.web.filter.DelegatingFilterProxy + + + springSecurityFilterChain + /* + + + org.springframework.web.context.ContextLoaderListener + +``` +* There could be multiple filter chain for different url pattern + * /portfolio - BasicAuthenticaionFilter + * /admin - DigestAuthenticaionFilter +* [SecurityFilterChain](https://docs.spring.io/spring-security/site/docs/3.0.x/reference/security-filter-chain.html) + * + ```java + interface SecurityFilterChain{ + boolean matches(HttpServletRequest req); + List getFilters(); + } + ``` +* data class SecurityContext(auhtentication) +* data class 
SecurityContextHolder(securityContext) +* FilterChain + * SecurityContextPersistenceFilter + * AuthenticationFilters + * Basic + * Digest + * OIDC + * RememberMeAuthenticationFilter + * AnonymousAuthenticationFilter + * ExceptionTranslationFilter + * FilterSecurityInterceptor + * Interceptor performs authroization +* SecurityContextPersistenceFilter + * Manages security context + * Tries to find securityContext from SecurityContextRepository + * In WebApplication SecurityContextRepository is Session using HttpSessionRepository + * SecurityContextHolder is ThreadLocal + + +## Authentication +* Many flavours of authentication + * BasicAuthenticationFilter + * OpenIDAuthenticationFilter + * DigestAuthenticationFilter + * UsernamePasswordAuthenticationFilter +* Authentication filter intercepts requests and extracts authenticationToken + * Authentication filter generates AutenticationRequest + * Authentication filter delegates AutenticationMnager to authenticate AutenticationRequest + * There are many flavours of tokens are there + * UsernamePasswordAuthenticationToken + * OpenIDAuthenticationToken +* Filter delegates authentication to AuthenticationManager +* AuthenticationManager delegates to one ore more AuthenticationProvider + * OpenIDAuthenticationProvider + * DaoAuthenticationProvider + * LdapAuthenticationProvider + * Provider delegates to UserDetailsService + * Identity store is accessed by UserDetailService + * Every identity service requires its own Service and AuthenticationProvider + * DigestAuthenticationFilter is an exception, it doesn't delegates rather directly interacts with UserDetailsService +* + ```java + public interface AuthenticationManager + { + public Authentication authenticate(Authentication authentication) throws AuthenticationException + } + ``` +* + ```java + public interface Authentication + { + public boolean isAuthenticated(); + public boolean getPrincipal(); + Object getCredentials(); //password or ssl + Collection getAuthorities(); + } + ``` +* + ```java + public interface AuthenticationProvider + { + public Autentication authenticate(Aunthentication authentication) throws AutenticationException + public boolean supprts(class authentication) + } + public interface UserDetailsInterface{ + UserDetails loadUserByUsername(String username) throws UsernameNotFoundException + } + ``` + +## Reference +* Spring security authentication/authorizaion - building effective layers of defense - pluralsite course +* [Spring Security Primer](https://spring.io/guides/topicals/spring-security-architecture/) \ No newline at end of file diff --git a/src/main/md/Tools/text_to_sql.md b/src/main/md/Tools/text_to_sql.md new file mode 100644 index 00000000..53a0b8c5 --- /dev/null +++ b/src/main/md/Tools/text_to_sql.md @@ -0,0 +1,69 @@ +* [TextQL: Execute SQL Against CSV or TSV](https://news.ycombinator.com/item?id=16781294) +* SQLite can do + * ```bash + sqlite> create table test (id integer, datatype_id integer, level integer, meaning text); + sqlite> .separator "," + sqlite> .import no_yes.csv test + ``` + * sqlite import will not accept stdin, breaking unix pipes. textql will happily do so. + * sqllite - Need to create table upfront + * https://github.com/mingodad/sqlite3-hashcode-2018 + * textql supports quote escaped delimiters, sqlite does not. + * extql leverages the sqlite in memory database feature as much as possible and only touches disk if asked. 
+ * https://github.com/dinedal/textql#key-differences-between-textql-and-sqlite-importing +* https://jsvine.github.io/intro-to-visidata/ + * Quickly open, explore, summarize, and analyze datasets + * Wonderul documentation +* http://harelba.github.io/q/ + * q - the name can make it a littler harder to find if you don't remember the repo. + * q can read stdin and write CSV to stdout + * q can chain several queries on the command line, or use it in series with other with other commands such as cat, grep, sed, etc. + * Highly recommended if you like SQL and deal with delimited files. + * q "SELECT COUNT(*) FROM ./clicks_file.csv WHERE c3 > 32.3" + * ps -ef | q -H "SELECT UID,COUNT(*) cnt FROM - GROUP BY UID ORDER BY cnt DESC LIMIT 3" + * Supports multiple encodin +* https://csvkit.readthedocs.io/en/1.0.3/ + * csvsql --query "select name from data where age > 30" data.csv > new.csv + * Csvsql uses sqlite under neath and you can do some nice things with this like joins, using sql functions, etc + * amazon athena that allows to do similar things in s3 at scale. + * Csvkit is great with pipes and you can also easily convert between csv tsv and even stuff like json +* Python's pandas library can also do sql-like queries against csv data. +* Charlatan - https://github.com/BatchLabs/charlatan#charlatan + * much lower memory footprint and faster execution + * works on stream + * subset of SQL that’s implemented +* [Windows you may consider using Log Parser Studio](https://gallery.technet.microsoft.com/office/Log-Parser-Studio-cd458765) + * https://en.m.wikipedia.org/wiki/Logparser +* [Apache Drill](https://drill.apache.org/) + * It works perfectly fine on local CSV and JSON files +* [BigBash](http://bigbash.it) + * Converts an Sql statement to a bash one-liner (using sed, grep, awk,...) + * Can execute the query using linux commands + * on very large file(s) because of the streaming nature +* https://github.com/kamac/AskXML + * Works on XML and JSON +* ClickHouse + * ```bash + ps aux | tail -n +2 | awk '{ printf("%s\t%s\n", $1, $4) }' | \ + clickhouse-local -S "user String, mem Float64" \ + -q "SELECT user, round(sum(mem), 2) as memTotal FROM table GROUP BY user ORDER BY memTotal DESC FORMAT Pretty" + ``` +* Sqawk + * ```bash + $ ps aux | sqawk -output table \ + 'select user, round(sum("%mem"), 2) as memtotal + from a + group by user + order by memtotal desc' \ + header=1 + ``` +* http://quisp.sourceforge.net/shsqlhome.html + * Similat to textql +* Facebook's osquery. + * Turns even complex structures into sql tables +* https://www.lnav.org/ + * An advanced log file viewer + * File formats are automatically detected and compressed files are unpacked on the fly. +* fsql - https://metacpan.org/pod/distribution/App-fsql/bin/fsql + * lets you perform SQL queries against one or several "flat" files of various formats. + * can modify data (currently CSV only) via SQL INSERT or DELETE commands. 
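+* A minimal sketch of the in-memory sqlite approach mentioned above (people.csv and its columns are hypothetical; adjust to your data)
+  * ```bash
+    # No database file and no upfront CREATE TABLE: in csv mode, .import creates
+    # the table from the CSV header row, and :memory: keeps everything off disk.
+    sqlite3 :memory: \
+      -cmd '.mode csv' \
+      -cmd '.import people.csv people' \
+      'SELECT city, COUNT(*) AS cnt FROM people GROUP BY city ORDER BY cnt DESC LIMIT 3;'
+    ```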
\ No newline at end of file diff --git a/src/main/md/Tools/tools_to_explore.md b/src/main/md/Tools/tools_to_explore.md new file mode 100644 index 00000000..38fa6c7e --- /dev/null +++ b/src/main/md/Tools/tools_to_explore.md @@ -0,0 +1,5 @@ +* [puppeteer recorder](https://github.com/checkly/puppeteer-recorder) +* Video speed controller chrome extension +* httpie +* tmux +* [paste command] (https://www.geeksforgeeks.org/paste-command-linux-examples/) diff --git a/src/main/md/Tools/virtualbox.md b/src/main/md/Tools/virtualbox.md new file mode 100644 index 00000000..52673fd5 --- /dev/null +++ b/src/main/md/Tools/virtualbox.md @@ -0,0 +1,7 @@ +# Virutal box +* Guest os is running inside VM +* Host os is the one which hosting virtualbox itself + + +# Virutalbox network +* When simple NAT is being used ensure all the service are listening on port 0.0.0.0 instead of 127.0.0.1, so that host can reach the guest OS diff --git a/src/main/md/Tools/vscode.md b/src/main/md/Tools/vscode.md new file mode 100644 index 00000000..3bfff7e3 --- /dev/null +++ b/src/main/md/Tools/vscode.md @@ -0,0 +1,52 @@ +# Ctrl + Shift + P > "Interactive Playground" > "Practice" +* ctrl + p + ? = Navigation tips from vscode +# https://vscodecandothat.com/ + + + +* select variable and press f2 - rename/refactor variable. +* select variable and press ctl+f2 - rename similar text (non programming language). +* ctrl + pp - goto the recently closed file +* zen mode (full screen) +* ctrl + p + @ = Navigate faster within the editor +* ctrl + p + @ + : = Split editor with classes, methods and functions +* + +## Settings +* User settings - %APPDATA%\Code\User\settings.json +* "files.eol": "\n" --setttings +* "editor.mouseWheelZoom" +* editor.fontFamily=fira + +# Icons and Theme +* Materials - Icon Theme +* Cobalt2 + +* [Alt + H + K](Keyboard shortcuts) +* [Ctrl + 1 / 2](Swith between split editor) +* [Ctrl + B](Toggle between sidebar) +* [Ctrl + `]([Ctrl + J] (Toggle between lower terminal windowd) + + +* [Code runner](https://github.com/formulahendry/vscode-code-runner) +* [VS Code emmet](https://code.visualstudio.com/docs/editor/emmet) + + + * https://docs.emmet.io/cheatsheet-a5.pdf +* [Visual Studio Code Can Do That: Tips & Tricks](https://www.youtube.com/watch?v=OOG3xcUQY5k) +* https://wiki.nikitavoloboev.xyz/text-editors/vs-code/vs-code-extensions.html +* https://code.visualstudio.com/blogs/2017/11/15/live-share + +* Checklist +* Material, Cobalt2, Fira Font and font.ligature +* Mouse zoom + +## Plugins +* Quokka.js +* REST Client +* Prettier +* 30 seconds of code + +# References +* Command Palette +* [10 Essential Plugins](https://hackernoon.com/10-essential-vs-code-extensions-for-javascript-developers-in-2019-e8320e3f421e) \ No newline at end of file diff --git a/src/main/md/W3SelectorApi.md b/src/main/md/W3SelectorApi.md new file mode 100644 index 00000000..fca4eb1b --- /dev/null +++ b/src/main/md/W3SelectorApi.md @@ -0,0 +1,33 @@ +# W3C Selectors API +* Basic API used accepted to iterate and navigate and read dom nodes, All browswers, JSoup and D3S +* Even if the method is invoked on a sepcific node element, selectors are still evaluated in the context of the entire document. +* The querySelector() methods on the Document, DocumentFragment, and Element interfaces must return the first matching Element node within the subtrees of the context node. If there is no matching Element, the method must return null. 
+* The querySelectorAll() methods on the Document, DocumentFragment, and Element interfaces must return a NodeList containing all of the matching Element nodes within the subtrees of the context node, in document order. If there are no matching nodes, the method must return an empty NodeList. + + +| Selector | Usage | Example | +|:--------:|:--------:|:--------:| +| #id | "#content" | Select using id of the tag, Generally container tag | +| p.description | className applied to a tag p | if we need to select all tag that has a class description | +| "selector1, selector2" | "p.warning, p.error" | select all p elements in the document that have a class of either "error" or "warning". | +| img:nth-of-type(n) | "#content img:nth-of-type(1)" | returns the 1st image within container "#content" | +| form[action="feedback.php"] | "#content" | Select using id of the tag, Generally container tag } | +| #body img[src^="http" | within body | selects 1st image with src beginning with "http" | +| #body img[src^="http" | within body | selects 1st image with src ending with "http" | + + +## DOM API +* Node and Element, NamedNodeMap, NodeList +* Document, DocumentFragment +* Document interface gives the factory methods needed to create elements, nodes +```IDL +# - 0 index +interface NodeList { + Node item(in unsigned long index); + readonly attribute unsigned long length; +}; +``` + +* [DOM API](https://www.w3.org/TR/2004/REC-DOM-Level-3-Core-20040407/core.html#ID-536297177) +* [Selector API](https://www.w3.org/TR/selectors-api/) +* [Examples](http://www.javascriptkit.com/dhtmltutors/css_selectors_api.shtml) \ No newline at end of file diff --git a/src/main/md/Wise_use_of_time.md b/src/main/md/Wise_use_of_time.md new file mode 100644 index 00000000..48dce785 --- /dev/null +++ b/src/main/md/Wise_use_of_time.md @@ -0,0 +1,12 @@ +* Creating many good habits would by default occupy time effectively + + +* Yoga +* Meditation +* Listening some ted talk +* Listening some spritual talks +* Listening technical conference videos +* Play/Learn music +* Learning astrology +* Reading book that was planned to read "someday" +* \ No newline at end of file diff --git a/src/main/md/Work/BuildAppInModernWay.md b/src/main/md/Work/BuildAppInModernWay.md new file mode 100644 index 00000000..65374856 --- /dev/null +++ b/src/main/md/Work/BuildAppInModernWay.md @@ -0,0 +1,7 @@ +* https://webflow.com/ +* Airtable +* Zapier + + +## Reference +* [What people build](https://news.ycombinator.com/item?id=19701783) \ No newline at end of file diff --git a/src/main/md/Work/Complements.md b/src/main/md/Work/Complements.md new file mode 100644 index 00000000..ad378ff4 --- /dev/null +++ b/src/main/md/Work/Complements.md @@ -0,0 +1,27 @@ +* He's friendly and can count on him to help people +* Always available when needed and quite responsive +* She/he was able to build relationship +* Was able to drive various engagements with top executives +* He has very good functional knowledge and also very good management skills. +* He is amiable, approachable and team player. +* He has a perfect mix of soft skills and technical proficiency. +* Good problem solving ability and helping nature. +* Who has vast technical knowledge and the generous ability to share it. +* He is always focused on business value, and excels in translating to.. +* He is detail oriented and has ability to manage complex projects +* He collaborate very well across the entire organization both horizontally and vertically. 
+ + +# For constructive - Focus on behavior rather than the person + +* You demonstrate a high degree of confidence when you answer customer questions about registration procedures, rather than "Your communication skills are good" +* You talked considerably during the staff meeting, which prevented me from getting to some of the main points, rather than "You talk too much" + +* THE SIX STEP METHOD FOR GIVING CONSTRUCTIVE FEEDBACK + +* Step 1: State the constructive purpose of your feedback. +* Step 2: Describe specifically what you have observed. +* Step 3: Describe your reactions. +* Step 4: Give the other person an opportunity to respond +* Step 5: Offer specific suggestions. +* Step 6: Summarize and express your support \ No newline at end of file diff --git a/src/main/md/Work/DevOps_SRE.md b/src/main/md/Work/DevOps_SRE.md new file mode 100644 index 00000000..98e0eee6 --- /dev/null +++ b/src/main/md/Work/DevOps_SRE.md @@ -0,0 +1,40 @@ +# Which is better DevOps or SRE? +## DevOps +* Back in the day operators and developers had a lot of contention developers used to throw their code over the metaphorical wall and operators were responsible for keeping that code running in production operators had little understanding of the code bases and developers had little understanding of operational practices +* Developers were concerned with shipping code and operators were concerned with reliability this misalignment often caused tension within the organization +* Developers were responsible for features and the operators were responsible for stability +* Being the developers wanted to move faster to get their features out faster a +* Operators wanted to move slower to keep things stable DevOps is a set of practices and a culture designed to break down those barriers between developers operators and other parts of the organization +---- +## DevOps five key areas +* first reduce organizational silos + * by breaking down barriers across teams + * we can increase collaboration and thoroughput +* second accept failure is normal computers are inherently unreliable + * so we can't expect perfection + * when we introduce humans into the system we get even more imperfection +* third implement gradual change + * changes are not small, incremental changes easier to review + * but in the event that a gradual change does make a bug in production it allows us to reduce our mean time to recover making it simple to rollback +* fourth - leverage tools + * we need to leverage tooling and automation +* fifth + * we need to measure everything + * measurement is a critical gauge for success + * Without a way a measure of our first four pillars were successful we would have no way of knowing if they were so +```java + class SRE implements DevOps +``` + +## Role of SRE +* SRE share ownership of production with our developers +* SRE use the same tooling in order to make sure everyone has the same view and same approach to working with production +* SRE should have blameless post-mortems where SRE should make sure that the failures that happen in our production systems don't happen the exact same way more than once +* SRE accept the failures as normal by encoding a concept of an error +* SRE can try canary things that roll things out to a small percentage of the fleet before move them out for all +* Auotmate as much as possible. 
+* Fifth when you talked about measuring everything + * Measuring the reliability and health of our systems + +## References +* [DevOps vs SRE](https://www.youtube.com/watch?time_continue=297&v=uTEL8Ff1Zvk)_ \ No newline at end of file diff --git a/src/main/md/Work/DifficultConversation.md b/src/main/md/Work/DifficultConversation.md new file mode 100644 index 00000000..6335b39d --- /dev/null +++ b/src/main/md/Work/DifficultConversation.md @@ -0,0 +1,131 @@ +## Don't delay the difficulat conversation +## Great communication +* Great communication isn’t just about what you say, it’s about what other people hear. +---- +## Myths to delay +* ‘It will sort itself out.’; +* ‘They’ll eventually stop doing it.’; +* ‘There are more important things to focus on.’ +---- +## Balancing empathy +* Too much empathy, we forget the cause behind the conversation +* Too much of our need, loose the empathy +---- +## NVC (Non-violent communiation) +1. When ____[observation], +1. I feel ____[emotion], +1. because I’m needing some ____[universal needs]. +1. Would you be able to ____[request]? +---- +## NVC makes some subtle but critical distinctions: +1. Observations vs. evaluations +1. Emotions vs. thoughts +1. Universal needs vs. strategies +1. Requests vs. demands +---- +## Evaluation vs Observation +* What we saw or hear without judgement is Observation. +* What we interpret what we saw was evulation (can have emotion attached) +---- +## Example of Evaluation vs Observation +1. Evaluation: ‘You are lazy.’ (a character attack) +1. Observation: ‘You said that you’d send the document last week and I haven’t received it.’ +1. Evaluation: ‘Your work is sloppy.’ (a criticism) +1. Observation: ‘Three of the numbers in the report were inaccurate.’ +1. Evaluation: ‘You’re always late.’ (a generalisation) +1. Observation: ‘You arrived ten minutes late to the meeting this morning.’ +1. Evaluation: You ignored me.’ (implied intent) +1. Observation: ‘I sent you two emails and I haven’t received a response.’---- +---- +## Emotions vs Thoughts +1. Emotions has powerful impact on other people +1. During difficult coversation watch-out words about emotions, it describes how they feel + * Annoyed/Anxious/Confused/Embarrassed + * Hurt/Sad/Scared/Tired +---- +## What comes after I feel, is a thought +* What comes after ‘I feel,’ isn’t an emotion — it’s a thought. +---- +## Can we share our thoughts on difficult conversation +* Sharing our thoughts in difficult conversations can often get us into trouble +---- +## Emotions that require extra attention and curiosity before sharing them +* Anger +* Evluative words +---- +## Emotion - Anger +* Most often hides more painful other emotion + * Hurt + * Shame + * Cheated +* Find what is underneath the anger before difficult conversation +* Anger would let speak impulsively, will let us forget NVC +---- +## Evluative words +* Some of the evaluative words are indirect question + * I feel blamed (you are blaming me?) +* Recognize evaluative words and find how you feel and impacts you emotionally. 
+ * Judged/Misunderstood/Rejected/Hurt +---- +## Universal Needs +* Everyone has few univesal needs + * Autonomy/Collaboration/Consistency/Clarity + * Integrity/Recognition/Respect/Reassurance + * Security/Support/Understanding +* Universal needs can be met using strategy +---- +## Universal Needs +* Need: ‘I need some transparency.’ + * Strategy: ‘I need you to copy me into every email.’ +* Need: I need support + * Strategy: ‘I need support from you.’ +---- +## Sample conversation +* I request that you be more respectful’ + * ‘I request that you arrive at meetings on time.’ +* ‘I request that you don’t dismiss other people’s ideas straightaway,’ + * ‘I request that when a team member shares an idea, you ask two or three probing questions before sharing your conclusion.’ +* Ensure on same page + * ‘Just so we know we’re on the same page, could you play back what I’m asking of you?’ +---- +## What not to say? +* Say what you want, not what you don’t want. +---- +## Sample communications +* Try to communicate within 30-40 words. + * Difficult but should learn +* Preparation for 30–40 words may sound like a lot of work. It is a lot of work. +* To a co-founder: ‘When you said, “I’m not happy with your work,” to me in front of the team, I felt embarrassed because it didn’t meet my need for trust and recognition. Please, could we set up a weekly one-on-one session to share feedback in private?’ +* To an investor: ‘I haven’t received any responses from the last three monthly updates. I’m feeling concerned because I need input. Please, would you mind getting back to me with responses to my questions in the last update?’ +* To a teammate: ‘You arrived 10 minutes late to the last three team meetings. I am frustrated because, as a team, we have a need for efficiency. Please, could you help me understand what’s happening?’ +---- +## Despite all effort, when there is 'No', how to react? +* Delivering 'NO' was difficult to other person +* Show empathy - Empathy is the gift of hearing someone without taking it personally +* When you have to say ‘no’ + * State the underlying need that stops you from saying ‘yes’. +## Effective communication consequences +* Purpose is to protect our needs, not to punish the other person. +---- +##Key points +1. If you understand the subconversations that exist within a difficult conversation, you will be able to manage the issues more effectively. +1. Never assume that you understand someone else’s motives. +1. Instead of trying to assign blame, consider how harnessing the associated anger will help you prevent a bad outcome in future. +1. Learn to see the “third story,” the impartial account, rather than fighting over whose story is true. +1. Venting your feelings is not useful. +1. Describing them carefully can be. +1. Don’t try to control other people’s responses. +1. You aren’t perfect and neither is anyone else. +1. The best way to get someone to listen to you is to listen to them first. +1. Reframing a difficult conversation is a great way to defuse it. +1. Pick your difficult conversations carefully. You won’t live long enough to have them all. 
+---- +## Reference +[Difficult conversation guide](https://medium.dave-bailey.com/the-essential-guide-to-difficult-conversations-41f736e63ccf) +[Comments on Hacker News](https://news.ycombinator.com/item?id=19490573) +[How to give (and receive) constructive criticism: a primer](https://www.reddit.com/r/femalefashionadvice/comments/3jfxk8/how_to_give_and_receive_constructive_criticism_a/) +[When giving negative feedback](https://www.reddit.com/r/LifeProTips/comments/9e4oc9/lpt_when_giving_negative_feedback_take_a_small/) +[Unable to receieve cc](https://www.reddit.com/r/howtonotgiveafuck/comments/29lvjq/i_am_unable_to_handle_constructive_criticism_in_a/) +[Emotional Literacy](https://en.wikipedia.org/wiki/Emotional_literacy) +[NVC Marshall Rosenberg - San Francisco Workshop - FULL ENGLISH SUBTITLES TRANSCRIPTION](https://www.youtube.com/watch?v=l7TONauJGfc) +---- \ No newline at end of file diff --git a/src/main/md/Work/WhyIQuitGoogle.md b/src/main/md/Work/WhyIQuitGoogle.md new file mode 100644 index 00000000..7467f949 --- /dev/null +++ b/src/main/md/Work/WhyIQuitGoogle.md @@ -0,0 +1,15 @@ +* You're only promoted when you're consistently showing you're working at the level you want to be promoted to +* When the existence of an indicator of a problem becomes a metric in itself, it ceases to indicate anything + * "When a measurement becomes a target, it ceases to be a good measurement". + * https://en.wikipedia.org/wiki/Goodhart%27s_law + * https://en.wikipedia.org/wiki/Campbell%27s_law + +* When there is new task, Employee asked himself whether it would help his case for promotion. If the answer was no, he didn’t do it. + +* [Reddit: Why I Quit google](https://www.reddit.com/r/programming/comments/80whkc/why_i_quit_google_to_work_for_myself/) +* [HackerNews: Why I Quit google](https://news.ycombinator.com/item?id=16483241) + +* Contracting + * Maybe that small companies in areas with no local devs will take on remote contractors because they have no other choice? 
+ * It's less about what you know (pass some baseline of competence/ability) and more about targeting the right market and knowing the right people + * \ No newline at end of file diff --git a/src/main/md/Work/dress_code.md b/src/main/md/Work/dress_code.md new file mode 100644 index 00000000..bf4febdd --- /dev/null +++ b/src/main/md/Work/dress_code.md @@ -0,0 +1,10 @@ +* Formal business attire +* Casual pants, capris +* Business appropriate casual shirts, polo shirts, sweaters, tops +* Denim and blue jeans +* T-Shirts and atheletics shoes are not cosnidered business causals + +## Casual Dress Code +* Denim/Jeans +* Athelitic Shoes and casual shoes that are in good condition +* Business Appropriate T-shirts in good condition, prefessional and do not include inappropriate messages or logos \ No newline at end of file diff --git a/src/main/md/books/Better_Books_ToRead.md b/src/main/md/books/Better_Books_ToRead.md new file mode 100644 index 00000000..63259d74 --- /dev/null +++ b/src/main/md/books/Better_Books_ToRead.md @@ -0,0 +1,80 @@ +# Priority +* [Event-Driven Microservices - Chris Richardson] (https://www.safaribooksonline.com/videos/event-driven-microservices/9781491944165) +* Code: The Hidden Language of Computer Hardware and Software +* Understanding Computation - From Simple Machines to Impossible Programs +* Building Microservices: Designing Fine-Grained Systems +* SRE book +* Cassandra: The Definitive Guide: Distributed Data at Web Scale 2nd Edition +* Fluent Python + + + +## Done +* [Elasticsearch: The Definitive Guide] (https://www.safaribooksonline.com/library/view/elasticsearch-the-definitive/9781449358532/) + +# Communication +* Marshall Rosenberg - https://www.amazon.co.uk/Nonviolent-Communication-Language-Marshall-Rosenberg/dp/1892005034 + +# Math series +* The Signal and the Noise: Why So Many Predictions Fail--but Some Don't (Statistics) +* Everybody-Lies-Internet-About-Really +* Weapons of Math Destruction: How Big Data Increases Inequality and Threatens Democracy +* The Theory That Would Not Die (Bayes' rule) +* Bayes' Theorem Examples: A Visual Introduction For Beginners +* How Not to Be Wrong - Jordan Ellenberg + +# Memory +* Unlimited Memory: How to Use Advanced Learning Strategies to Learn Faster, Remember More and be More Productive Kindle Edition by Kevin Horsley +* + +# Demystified Math series +* Discrete Mathematics +* Calculus DeMYSTiFieD +* Math Proofs Demystified by Stan Gibilisco +* Math Word Problems Demystified by Allan Bluman +* Mathematica Demystified by Jim Hoste +* Technical Math Demystified by Stan Gibilisco +* Business Math Demystified by Allan Bluman +* Everyday Math Demystified by Stan Gibilisco +* Advanced Statistics Demystified by Larry Stephens +* Business Statistics Demystified by Steven M Kemp +* Engineering Statistics Demystified by Larry J. Stephens +* Statistical Process Control Demystified by Paul Keller +* Statistics Demystified by Stan Gibilisco +* Differential Equations Demystified by Steven G. 
Krantz +* Probability Demystified 2/E 2nd Edition +* Geometry DeMYSTiFieD, 2nd Edition Stan Gibilisco +* Trigonometry Demystified 2/E + +## Cloud Books +* Accelerate: The Science of Lean Software and DevOps: Building and Scaling High Performing Technology Organizations +* Cloudonomics, + Website: The Business Value of Cloud Computing 1st Edition +* Cloud Native Java: Designing Resilient Systems with Spring Boot, Spring Cloud, and Cloud Foundry +* INSPIRED: How to Create Tech Products Customers Love +* Product Roadmaps Relaunched: How to Set Direction while Embracing Uncertainty + + +# Other books +* Amazon "Best Sellers in Computer Science" +* https://www.amazon.com/Best-Sellers-Books-Computer-Science/zgbs/books/3508/ref=zg_bs_nav_b_2_5 +* Tech books that were translated to other languages +* https://www.amazon.com/Best-Sellers-Kindle-Store-Information-Management/zgbs/digital-text/154927011/ref=zg_bs_nav_kstore_5_154902011 +* https://www.amazon.com/Heart-Logs-Stream-Processing-Integration +* The Phoenix Project: A Novel about IT, DevOps, and Helping Your Business Win 5th Anniversary Edition +* Everybody Lies: Big Data, New Data, and What the Internet Can Tell Us About Who We Really Are +* Thinking in Systems: A Primer +* The Design of Everyday Things: Revised and Expanded Edition +* Principles: Life and Work + +* ["Cartoon guide to Physics"](https://www.amazon.com/Cartoon-Guide-Physics/dp/0062731009) +* ["The Cartoon Guide to Calculus"](https://www.amazon.com/Cartoon-Guide-Calculus/dp/0061689092/ref=sr_1_5?s=books&ie=UTF8&qid=1537065864&sr=1-5&keywords=cartoon+guide+to+statistics&dpID=51gQZ9XpaYL&preST=_SX218_BO1,204,203,200_QL40_&dpSrc=srch) +* Larry Gonick - books - https://www.amazon.com/Larry-Gonick/e/B000AQ75IY/ref=dp_byline_cont_book_1 + +# Self help books +* The Subtle Art of Not Giving a F*ck: A Counterintuitive Approach to Living a Good Life - Mark Manson +* Thinking in Bets - Annie Duke +* How will you measure your Life - Clayton M/Christensen +* Stress Proof - Mithu Storoni + +## Ebooks +* [The subtle art of not giving fuck](https://issuu.com/magiimaa/docs/_mark_hanson__the_subtle_art_of_not) \ No newline at end of file diff --git a/src/main/md/books/Notes/5 elements of effective thinking.md b/src/main/md/books/Notes/5 elements of effective thinking.md new file mode 100644 index 00000000..a9ccc482 --- /dev/null +++ b/src/main/md/books/Notes/5 elements of effective thinking.md @@ -0,0 +1,85 @@ +# Essential thinking +* Thought provking ideas to provke thoughts +* Curiousity, Obsession, Dogged Endurance, Combined with Self-Criticism bring new ideas (Einestein) +* Education is all about how we solve problems even after forgetting everything that we learnt + +## Esential & Core +* Master the core +* Shadows are colors of the sky +* What does the Picasso - "The Bull" diagram teaches? +* Cut the bull, describe what you see (Stop assuming, bullshit) +* Earth + * Earth - Deep understanding + * Fire - Effective Failure + * Air - Question + * Water - Flow of Idea +---- +## Understand Deeply +* Understanding is not yes/no, it is spectrum +* We understand some aspect of understanding +* There are aspects of this issue that I don't understand, I now must uncover them and work toward making greater meaning. + +1. Start with simple + * When complex issue is encountered + * Find simplest problem that is still unsolvable + * find simper version of it, and solve it +1. Spot the specific + * Find special case and scrtuanize micro-level details, and try to generalize +1. 
Add the adjuctive + * Challenge yourself to add as many as adjective as possible + * Do not leave an adjective for another descriptor untill some new facet is revealed + * Uncover the hidden confusion or misunderstanding using new adjectives + * First-world-war, the war that ends all the war, 2nd world-war +---- +## Fail Effectively +* We may not know what is the right way to do it right, but we can always start with someting wrong +* Failure as a process to solve the problem + * Kid solving 36 into to two halves + * Find answer something definitely it is wrong (16) + * He answered 16, and successive question, what makes to prove that 16 is wrong? + * Kid came back with 18 is the answer +* There is no better teacher than one's own failure, but stay with failure and learn a new lesson +* Fail fast + * Fail fast, revise, rethink and relearn + * Don't stare at screen, rather fail fast with new draft mail + * Don't stare at screen, rather fail fast with buggy code that doesn't even compile +* Fail again - n times + * For enormous task, should fail n times before succeeding + * One failure done, nine to go +* Fail intentionally + * Clever failed ideas are started as failed attempts +---- +## Create questions +* Creating questions makes us from bystander to active participant +* Prompt - "What are your questions?" +* What are your questions to share with this group? +* One who raises question is open minded +* What is the real issue here? +* Raise "What if questions?" +* Fundamental Question + * What does the simples case look like? + * What happens in that trivial situation? +* Ask something else + * What is a different but related question? + * What is the opposite point of view? +---- +## Go with the Flow of Ideas +* With one idea in hand, what is next? +* Extend the current idea +* Generalize +* Reapply this new notion +* Run down all the paths of ideas till the dead-end and learn, and come back +---- +## Embrace Doubt +* Opposite of doubt is not certainity, it is closed mind +* What if I am wrong? +* Empathize without sympathize for opposite view +* Live and believe opposite view for certain time as if it logically make sense +* Live like thiest for a week, and athiest for a week. Try and experiment alternate + +## Never stop the idea +* Tenacity and Perseverance is very important to learn the complete impact about the idea +---- +## Be open to change +* Small and incremental changes will transform how we think and engage with the world +* How we see a problem wille effectively change about our perception over period of time \ No newline at end of file diff --git a/src/main/md/books/Notes/5 elements_puzzle.md b/src/main/md/books/Notes/5 elements_puzzle.md new file mode 100644 index 00000000..4dc3bf71 --- /dev/null +++ b/src/main/md/books/Notes/5 elements_puzzle.md @@ -0,0 +1,9 @@ +## Lesson learnt +* Who' who? + * Take some path. If you hit dead end, your initial assumption could be wrong, find all if clasuses +* When six equals eight? + * + +# Whos and who, http://highered.blog.statesman.com/2016/05/31/a-sample-puzzle-from-southwestern-presidents-seinfeld-of-classes/ + * “One afternoon on a college campus over a hundred miles from Georgetown, two students — a math major and a philosophy major — were talking. ‘I am a math major,’ said the one with black hair. ‘I am a philosophy major,’ said the one with red hair. 
Given that at least one of these students is lying, what color hair does the math major have?” + \ No newline at end of file diff --git a/src/main/md/books/current_reading_queue.md b/src/main/md/books/current_reading_queue.md new file mode 100644 index 00000000..44f1b895 --- /dev/null +++ b/src/main/md/books/current_reading_queue.md @@ -0,0 +1,12 @@ +1. Spring in Action, Fifth Edition +2. After the trade is made +3. Securities Operations: A Guide to Trade and Position Management +4. Productivity book - xyz +5. deep-learning-with-python +6. Hands-On-Machine-Learning-with-Scikit-Learn-and-TensorFlow +7. Java-Comparison-Become-Craftsman-Examples +8. Effective java 3rd edition +9. Refactoring: Improving the Design of Existing Code, Second Edition +10. https://www.goodreads.com/en/book/show/6251150-what-intelligence-tests-miss +11. https://www.goodreads.com/en/book/show/40647884-making-up-your-own-mind +12. https://www.goodreads.com/book/show/17286670-scarcity \ No newline at end of file diff --git a/src/main/md/books/stoicsm_books.md b/src/main/md/books/stoicsm_books.md new file mode 100644 index 00000000..aa8495d7 --- /dev/null +++ b/src/main/md/books/stoicsm_books.md @@ -0,0 +1,4 @@ +* [Resources for new comers](https://www.reddit.com/r/Stoicism/comments/1br1tp/resources_for_those_new_to_stoicism/) +* [The Enchiridion](http://classics.mit.edu/Epictetus/epicench.html) +* [Commentary on Epictetus' Enchiridion](https://web.archive.org/web/20050519213611/http://www.geocities.com/stoicvoice/journal/0301/sc0301b0.htm) +* [Marcus Aurelius](https://www.iep.utm.edu/marcus/) \ No newline at end of file diff --git a/src/main/md/bpm/bpm_readme.md b/src/main/md/bpm/bpm_readme.md new file mode 100644 index 00000000..605f2c7a --- /dev/null +++ b/src/main/md/bpm/bpm_readme.md @@ -0,0 +1,26 @@ +# Influential developers +* Tom Baeyens + * jBPM-1-4, Activiti 5.0, Signavio and RockScript.io +* Joram Barrez + * jBPM-1-4, Activiti 5.0 + * http://www.jorambarrez.be/blog/category/bpmn/ +* Paul Holmes-Higgin - Alfresco, Flowable + * https://paulhh.wordpress.com/category/flowable/ +* Bernd Rücker - Camunda co-founder + * https://blog.bernd-ruecker.com/ +* Daniel Meyer + * https://github.com/meyerdan + +# Flowable blogs +* http://bpmn20inaction.blogspot.sg/2017/02/event-sub-processes-in-flowable-6.html +* http://www.jorambarrez.be/blog/2016/11/02/running-flowable-on-cockroachdb/ +* https://spring.io/blog/2015/03/08/getting-started-with-activiti-and-spring-boot +* https://paulhh.wordpress.com/category/flowable/ + +# Camunda blogs +* + +# Tools +* The Yaoqiang BPMN Editor - Can connect to Activiti engine and thus can be used as a graphical workflow authoring interface +* The DocuBrain® Workflow Editor - Can connect to Activiti engine +* \ No newline at end of file diff --git a/src/main/md/career.md b/src/main/md/career.md new file mode 100644 index 00000000..b38f07e0 --- /dev/null +++ b/src/main/md/career.md @@ -0,0 +1 @@ +* https://capd.mit.edu/sites/default/files/about/files/career-handbook.pdf \ No newline at end of file diff --git a/src/main/md/dataintensive.md b/src/main/md/dataintensive.md new file mode 100644 index 00000000..0eeb88bd --- /dev/null +++ b/src/main/md/dataintensive.md @@ -0,0 +1,33 @@ +* Read-after-write consistency - Users should always see data that they submitted themselves. +* Monotonic reads - After users have seen the data at one point in time, they shouldnt later see the data from some earlier point in time. 
+* Consistent prefix reads - Users should see the data in a state that makes causal sense: for example, seeing a question and its reply in the correct order. +* Read Repair +* r + w > n (replica) +* Hinted Handoff +* Failure reasons + * different order at different nodes + * due to variable network delays + * partial failures +* two operations concurrent if they are both unaware of each other, regardless of the physical time at which they occurred +* Handling Write Conflicts. + * Last write wins + * Happens before +* Use of a version number per replica as well as per key - is kind of vector clock with subtle difference +* The collection of version numbers from all the replicas is called a version vector +* Partition + * Key range partitioning + * Hash partitioning + * Hybrid approaches are also possible, for example with a compound key: using one part of the key to identify the partition and another part for the sort order + * Document-partitioned indexes (local indexes), where the secondary indexes are stored in the same partition as the primary key and value. + * Term-partitioned indexes (global indexes), where the secondary indexes are partitioned separately, using the indexed values + +* Transaction +* Lost Updates + * ATOMIC WRITE OPERATIONS - ORM may spoil it + * UPDATE counters SET value = value + 1 WHERE key = 'foo'; + * EXPLICIT LOCKING + * AUTOMATICALLY DETECTING LOST UPDATES + * COMPARE-AND-SET + * last write wins (LWW) conflict resolution method is prone to lost updates - Don't use them +* Write Skew and Phantoms + * d \ No newline at end of file diff --git a/src/main/md/dlt.md b/src/main/md/dlt.md new file mode 100644 index 00000000..2bf6e1ea --- /dev/null +++ b/src/main/md/dlt.md @@ -0,0 +1 @@ +* https://www.asx.com.au/services/chess-replacement.htm diff --git a/src/main/md/english/vocabulary.md b/src/main/md/english/vocabulary.md new file mode 100644 index 00000000..d3b07175 --- /dev/null +++ b/src/main/md/english/vocabulary.md @@ -0,0 +1,205 @@ +*Apparition* - Ghost like image of a person. “This is not an apparition, but my flesh and blood form." + +*Surmise* - Suppose that something is true without having evidence to confirm it. + +*Evanescent* - Unstable. Astrally-produced objects are structurally evanescent + +*Placate* - to appease or pacify, especially by concessions or conciliatory gestures. After the fight, He now spoke placatingly. + +*Spasmodic* - Irregular bursts. My classroom attendance had been very spasmodic. + +*Allegory* - 'Story or poem that can be interpreted to reveal hidden meaning, typically a moral or political one' - Though this book looks like fiction, it is kind of allegory of History + +*Intransigent* - `Refusing to change one's views` - The most thrillingly intransigent ever uttered: "Heaven and earth shall pass away, but words shall not pass away" + +*Nihilism* - Philisophy that argues that life is without objective meaning, purpose or intrinsic value. "If teenagers go through life in nihilistic frme of mind. Nothing would seem matter for them. + +*Scornfully* - Is defined as something done without respect. Raising eyebrows scornfully. Laughing scornfully. + +*Allegiance* - Loyalty or Commitment to a superior - Monks transfer allegiance from the creation to the Creator + +*Automaton* - a moving mechanical device made in imitation of a human being. Example: That job is to make difficult and sometimes unpopular decisions, and not for members of Parliament to become Brexit-delivering automatons as some would have it. 
+ +*Occidental* - Relating to the countries of the West. Example: Three scales—major, harmonic minor, melodic minor—are the only ones which Occidental music employs, but Indian music outlines 72 thatas or scales. + +*Compunction* - Anxiety arising from awareness of guilt. Example: Despite guru was wrong, He was laughing heartily, without compunction for my disillusionment. +*Indignant* - Feeling or showing anger or annoyance at what is perceived as unfair treatment. Example: On Friday, Meyer posted an indignant response on Twitter to recent media coverage. + +*Surfeit* - An overabundant supply. Having surfeited ourselves on raw oysters, we had to decline the rest of the restaurant's offerings + +*Beguile* - Attract to deceive. God will beguile you with an infinite ingenuity. + +*Altiloquent* - (Adj) Pompous in speech - You remind me of an occasion some time past when reading a book of an altiloquent style. + +Pompous, or pretentious in speech +*Empyrean* - Highest part of heaven, thought by the ancients to be the realm of pure fire. I cognized the center of the empyrean as a point of intuitive perception in my heart. The empyrean domain where human will and God's will became as one. + +*Umbrageous* - Affording shade, spotted with shadows. - A garden with umbrageous trees Is here for you to take your ease. + +*Oracular* - Something that's oracular is giving off an oracle-like vibe: mysterious, enigmatic, prophetic and probably a little weird. Example: “When you come to a crossroad, turn right and keep going,” he finally pronounced oracularly. + +*Genuflection* - Genuflection or genuflexion is the act of bending at least one knee to the ground. Example: I left the temple without genuflection and walked briskly toward the outlying village of Ranbajpur. + +*imperturbable* - Calm - My imperturbable guru listened politely, analyzing himself to see if any shred of truth lay within the denunciation. + +*Inveterate* - "Done as a habit and not likely to change" - Example: My guru ordinarily was gentle and affable to guests; his welcome was given with charming cordiality. Yet inveterate egotists sometimes suffered an invigorating shock. I never trust anything he says - the man's an inveterate liar. + +*Inveterate* - "Done as a habit and not likely to change" - Example: My guru ordinarily was gentle and affable to guests; his welcome was given with charming cordiality. Yet inveterate egotists sometimes suffered an invigorating shock. I never trust anything he says - the man's an inveterate liar. + +*Affable* - Friendly - My guru ordinarily was gentle and affable to guests; + +*Supercilious* - (Adj) Arrogant, A supercilious smile or a glance of amused tolerance occasionally betrayed that the newcomers anticipated nothing more than a few pious platitudes. + +*Flinty* - (Adj) Severe and determined. Example: I thought that he had a flinty tone about him tonight. + +*Imbibe* - To assimilate or to receive into the mind and retain. Examples: From transient teachers of my life I had imbibed a few erroneous lessons. She imbibed vast quantities of coffee. +*Extol* - (v) to praise highly; laud; eulogize: Example: McConnell has extolled the virtues of Medicare Part D and even called President Obama “smart.”, Sri Yukteshwar extolled the superiority of mental therapy, and often repeated: “Wisdom is the greatest cleanser.” + +*ineffable* - Ineffable - Unable to decribe via words. Exeorable - Unable to prevent. I am caught up in the ineffable, seemingly inexorable event that is out of my hands. 
+ +*Cauterize* - Cauterization (or cauterisation, or cautery) is a medical practice or technique of burning a part of a body to remove or close off a part of it. Example: The undesirable habit-mechanisms in the brain can be often cauterized. +*Promiscuous* - demonstrating or implying an unselective approach; indiscriminate or casual. Example: “God does not wish the secrets of His creation revealed promiscuously. +*Recant* - to announce in public that your past beliefs or statements were wrong and that you no longer agree with them: Example: After a year spent in solitary confinement, he publicly recanted (his views). + +*immediacy* - noun - The quality of bringing one into direct and instant involvement with something, giving rise to a sense of urgency or excitement. +Example: Master was keeping track of student's attention with a devastating immediacy. + +*convalescence* - + * He is convalescing from his leg injuries. + * Youth soccer team members rescued more than two weeks after sudden flooding trapped them in a cave complex in Thailand are now convalescing at a hospital in the northern city of Chiang Rai. + +*Perspicacious* - having a ready insight into and understanding of things. His perspicacious grandfather had bought the land as an investment, guessing that there might be gold underground. + +*Indelibly* - adjective - making marks that cannot be erased, removed, or the like: The manifest need of it had been deeply and indelibly impressed. That impression became so indelibly fixed that even after my marriage I refused all positions. + +*consternation* - /kɒnstəˈneɪʃ(ə)n/ - noun -a feeling of anxiety or dismay, typically at something unexpected. the two … stared at each other in consternation, and neither knew what to do. "I hid my consternation." + +*Obeisance* :- Demonstration of an obedient attitude, especially by bowing deeply; a deep bow which demonstrates such an attitude. + +*Lugubrious* :- looking or sounding sad and dismal. +Jitendra maintained a lugubrious silence as our train covered the miles. + +*Beneficent*: the quality or state of doing or producing good : the quality or state of being beneficent +"admired for her beneficence" + +exigency - /ˈɛksɪdʒ(ə)nsi,ˈɛɡzɪdʒ(ə)nsi,ɪɡˈzɪdʒ(ə)nsi,ɛɡˈzɪdʒ(ə)nsi +plural noun: exigencies +an urgent need or demand. +"women worked long hours when the exigencies of the family economy demanded it" +synonyms: need, demand, requirement, want; More + +self-effacement noun. +The act of making oneself, one's actions, etc, inconspicuous, esp because of humility or timidity. + +sojourn - /ˈsɒdʒ(ə)n,ˈsɒdʒəː - noun +a temporary stay. - "her sojourn in Rome" +synonyms: stay, visit, stop, stopover, residence; More +stay somewhere temporarily. - "she had sojourned once in Egypt" +synonyms: stay, live; More + +Irrefragable - /ɪˈrɛfrəɡəb(ə)l/ +adjective - not able to be refuted or disproved; indisputable +With an antenna of irrefragable insight I sensed that my guru knew God + +Exhume - /ɛksˈ(h)juːm,ɪɡˈzjuːm/ +"I was exhuming a knowledge not found in lecture halls." +gerund or present participle: exhuming +dig out (something buried, especially a corpse) from the ground. +"the bodies were exhumed on the orders of a judge" +synonyms: disinter, dig up, unearth, bring out of the ground; More +GEOLOGY +expose (a land surface) that was formerly buried. 
+"various landforms have been exhumed from beneath a covering of Triassic sediments" + +Placidity - a state of freedom from storm or disturbance +the placidity of the area makes it a perfect vacation spot for people who just want to relax +Here expanse water placidity reminds us of the vast calmness of Universe. + +Chastisement - is the act of scolding or punishing someone. If you talk back to your stern teacher, you won't be surprised by the chastisement that follows. The noun chastisement usually means a verbal reprimand, like the chastisement a basketball coach might give his team after a terrible loss. + +capitulation - /kəpɪtjʊˈleɪʃ(ə)n - noun +the action of ceasing to resist an opponent or demand. +"she gave a sigh of capitulation" + +intercession - /ˌɪntəˈsɛʃ(ə)n/ - noun +the action of intervening on behalf of another. +"he only escaped ruin by the intercession of his peers with the king" +synonyms: mediation, intermediation, negotiation, arbitration, conciliation, intervention, interposition, involvement, action; the action of saying a prayer on behalf of another. +"prayers of intercession" + +Indefatigable - /ˌɪndɪˈfatɪɡəb(ə)l/ adjective +(of a person or their efforts) persisting tirelessly. +"an indefatigable defender of human rights" +synonyms: tireless, untiring, never-tiring, unwearied, unwearying, unflagging; + +Noumenon - noun, plural nou·me·na [noo-muh-nuh] /ˈnu mə nə/. +The object, itself inaccessible to experience, to which a phenomenon is referred for the basis or cause of its sense content. +a thing in itself, as distinguished from a phenomenon or thing as it appears. +In metaphysics, the noumenon is a posited object or event that exists independently of human sense and/or perception. The term noumenon is generally used when contrasted with, or in relation to, the term phenomenon, which refers to anything that can be apprehended by or is an object of the senses. + +crescograph - A crescograph is a device for measuring the growth in plants. It was invented in the early 20th century by Sir Jagadish Chandra Bose. The Bose crescograph uses a series of clockwork gears and a smoked glass plate to record the movement of the tip of a plant (or its roots) at magnifications of up to 10,000. + +Ensanguined - simple past tense and past participle of ensanguine +Adjective - ensanguined (comparative more ensanguined, superlative most ensanguined) +Bloodstained, bloody. + +ostentatious -/ˌɒstɛnˈteɪʃəs/ - adjective +characterized by pretentious or showy display; designed to impress. +"a simple design that is glamorous without being ostentatious" +synonyms: showy, pretentious, conspicuous, obtrusive, flamboyant, gaudy, garish, tinsel, tinselly, brash, vulgar, loud, extravagant, fancy, ornate, affected, theatrical, actorly, overdone, over-elaborate, kitsch, tasteless; More + +garrulous +/ˈɡar(j)ʊləs/ +adjective +excessively talkative, especially on trivial matters. +"a garrulous cab driver" +synonyms: talkative, loquacious, voluble, verbose, long-winded, chatty, chattery, chattering, gossipy, gossiping, babbling, blathering, prattling, prating, jabbering, gushing, effusive, expansive, forthcoming, conversational, communicative; More + +importunity +Importunity is when you beg someone to do something. "Please, please take me to the mall!" is probably something said by many teens with importunity. + +clairvoyance - /klɛːˈvɔɪəns/ - noun +the supposed faculty of perceiving things or events in the future or beyond normal sensory contact. 
+"she stared at the card as if she could contact its writer by clairvoyance" + + +Quaint - attractively unusual or old-fashioned. +"quaint country cottages" +synonyms: picturesque, charming, sweet, attractive, pleasantly old-fashioned, old-fashioned, old-world, toytown; More cunning; informaltwee, arty-crafty; +"narrow streets lead to a quaint bridge over the river" + +Aesthete - Appreciator or beauty. Steve jobs is a aesthete + +Eponym - Derived from a name +Alexandria - is an eponym because it is derived from name "Alexandar the Great" + +facetious - /fəˈsiːʃəs/ - adjective +treating serious issues with deliberately inappropriate humour; flippant. +"a facetious remark" +synonyms: flippant, flip, glib, frivolous, tongue-in-cheek, waggish, whimsical, joking, jokey, jesting, jocular, playful, roguish, impish, teasing, arch, mischievous, puckish; More + +malapropism - /ˈmaləprɒˌpɪz(ə)m/ - noun +the mistaken use of a word in place of a similar-sounding one, often with an amusing effect (e.g. ‘dance a flamingo ’ instead of flamenco ). +synonyms: wrong word, solecism, error, misuse, misusage, misapplication, infelicity, slip of the tongue + + +peevish - /ˈpiːvɪʃ/ - adjective +having or showing an irritable disposition. +"a thin peevish voice" + +maladroit - /ˌmaləˈdrɔɪt/ - adjective +inefficient or inept; clumsy. +"both men are unhappy about the maladroit way the matter has been handled" +synonyms: bungling, awkward, inept, clumsy, bumbling, incompetent, unskilful, heavy-handed, ungainly, inelegant, inexpert, graceless, ungraceful, gauche, unhandy, uncoordinated, gawky, cloddish, clodhopping, all fingers and thumbs, flat-footed, lumbering; More + + +abrogate - /ˈabrəɡeɪt/ - verbFORMAL +repeal or do away with (a law, right, or formal agreement). +"a proposal to abrogate temporarily the right to strike" +synonyms: repudiate, revoke, repeal, rescind, overturn, overrule, override, do away with, annul, cancel, break off, invalidate, nullify, void, negate, dissolve, countermand, veto, declare null and void, discontinue; More +evade (a responsibility or duty). +"we believe the board is abrogating its responsibilities to its shareholders + +schadenfreude - /ˈʃɑːd(ə)nˌfrɔɪdə,German ˈʃɑːdənˌfrɔydə/ - noun +pleasure derived by someone from another person's misfortune. +"a business that thrives on Schadenfreude" \ No newline at end of file diff --git a/src/main/md/fsync.md b/src/main/md/fsync.md new file mode 100644 index 00000000..6c1edbe0 --- /dev/null +++ b/src/main/md/fsync.md @@ -0,0 +1,26 @@ +##FSync + +## Durability and Concept of checkpoint + * Every time check point is invoked + * Shared buffers (Managed by PGSQL) - WAL Buffers (Top layer) + * Page Cache (Kernel Managed) (mid layer) + * [Data Files] and [WAL - TransactonLog] + +## Shared buffers +* Are written to page cache + +## Page Cache (Kernel Managed) (mid layer) +* It invokes fsync to store to underlying file (DataFiles and WAL) + +## When IO error happens +* We expect fsync to tries to rewrite the same data +* fsync may write data on network storage, even tiny network failure might cause fsync to fail +* But fsync won't retry next time, but marks the actual buffer as clean (kind of insync with io) (ext4) + +## One file might have multile FILE descriptor +* When failure reported in one process the should be reported to other process, but only first process gets the error + + +## Reference +*[PostgreSQL vs. 
fsync How is it possible that PostgreSQL used fsync incorrectly for 20 years, and wh…](https://www.youtube.com/watch?v=1VWIGBQLtxo) +* Write-Ahead Logging (WAL) \ No newline at end of file diff --git a/src/main/md/gcp/Debian_VM.md b/src/main/md/gcp/Debian_VM.md new file mode 100644 index 00000000..44326f37 --- /dev/null +++ b/src/main/md/gcp/Debian_VM.md @@ -0,0 +1,39 @@ +```bash +sudo passwd //TO: Change root password +sudo apt-get update +sudo apt-get install default-jdk +sudo apt-get install openjdk-11-jdk +sudo apt-get install software-properties-common +sudo add-apt-repository "deb http://ppa.launchpad.net/webupd8team/java/ubuntu xenial main" +sudo apt-get update +sudo apt-get install oracle-java8-installer + +ls -ltar /usr/bin/java +/usr/bin/java -> /etc/alternatives/java +ls -ltar /etc/alternatives/java +/etc/alternatives/java -> /usr/lib/jvm/java-11-openjdk-amd64/bin/java +``` + +```bash +sudo snap install docker +docker run -d --name elasticsearch -p 9200:9200 -p 9300:9300 -e "discovery.type=single-node" elasticsearch:6.8.6 +``` + +```bash +sudo install unzip +sudo bash +sudo mkdir -p /project/elasticsearch/ +cd /project/elasticsearch/ +wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-6.8.6.zip +wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-6.8.6.zip.sha512 +shasum -a 512 -c elasticsearch-6.8.6.zip.sha512 +unzip elasticsearch-6.8.6.zip +cd elasticsearch-6.8.6/ +``` + +```bash +mkdir -p /project/elasticsearch/ +useradd -d /project/elasticsearch/ elasticsearch +chown elasticsearch:elaticsearch /project/elasticsearch +passwd elasticsearch +``` \ No newline at end of file diff --git a/src/main/md/gcp/GCP_GKE.md b/src/main/md/gcp/GCP_GKE.md new file mode 100644 index 00000000..df3f1dba --- /dev/null +++ b/src/main/md/gcp/GCP_GKE.md @@ -0,0 +1,105 @@ +## Basic commands +```bash +gcloud config list account +gcloud config list project +gcloud config set compute/zone asia-southeast1 +gcloud config list +export PROJECT_ID=$(gcloud config list --format 'value(core.project)') +gsutil ls gs://$PROJECT_ID +gsutil -m cp -r gs://$PROJECT_ID/* ~/ +chmod +x ~/guestbook-frontend/mvnw +chmod +x ~/guestbook-service/mvnw +gcloud services enable pubsub.googleapis.com +gcloud services enable container.googleapis.com +gcloud services enable containerregistry.googleapis.com +gcloud pubsub topics create messages +``` + +## Twelth lab (deploying simple application in GCP without db) + +```bash +export PROJECT_ID=$(gcloud config list --format 'value(core.project)') +gsutil ls gs://$PROJECT_ID +gsutil -m cp -r gs://$PROJECT_ID/* ~/ +chmod +x ~/guestbook-frontend/mvnw +chmod +x ~/guestbook-service/mvnw +gcloud services enable container.googleapis.com +gcloud services enable containerregistry.googleapis.com + +## Create a Kubernetes Engine cluster that has Stackdriver Logging and Monitoring enabled. +gcloud container clusters create guestbook-cluster --zone=us-central1-a --num-nodes=2 + --machine-type=n1-standard-2 \ + --enable-autorepair \ + --enable-cloud-monitoring \ + --enable-cloud-logging +gcloud config list --format 'value(core.project)' +~/guestbook-frontend/pom.xml +remove this line - tomcat + + com.google.cloud.tools + jib-maven-plugin + 0.9.6 + + gcr.io/[PROJECT_ID]/guestbook-frontend + + +Replace the PROJECT_ID with actual project id +cd ~/guestbook-frontend +./mvnw clean compile jib:build +cd ~/guestbook-service +./mvnw clean compile jib:build + +# you create a service account with permissions to access your GCP services. 
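+# Note: the lab grants this service account the broad roles/editor role below;
+# outside a lab you would normally scope this down to only the roles the app needs.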
+# You then store the service account that you generated earlier in Kubernetes as a secret so that it is accessible from the containers. +gcloud iam service-accounts create guestbook +export PROJECT_ID=$(gcloud config list --format 'value(core.project)') +gcloud projects add-iam-policy-binding ${PROJECT_ID} --member serviceAccount:guestbook@${PROJECT_ID}.iam.gserviceaccount.com --role roles/editor +gcloud iam service-accounts keys create ~/service-account.json --iam-account guestbook@${PROJECT_ID}.iam.gserviceaccount.com +## Kubernetes server version +kubectl version +kubectl create secret generic guestbook-service-account --from-file=$HOME/service-account.json +kubectl describe secret guestbook-service-account +~/kubernetes/guestbook-frontend-deployment.yaml +image: saturnism/spring-gcp-guestbook-frontend:latest replace with gcr.io/[PROJECT_ID]/guestbook-frontend +~/kubernetes/guestbook-service-deployment.yaml. +image: saturnism/spring-gcp-guestbook-frontend:latest replace with gcr.io/[PROJECT_ID]/guestbook-service +kubectl apply -f ~/kubernetes/ +kubectl get svc guestbook-frontend +kubectl get svc +http://[EXTERNAL_IP]:8080 +``` + +## Thirteen events, monitoring (promethus kubernetes monitoring) + +```bash +export PROJECT_ID=$(gcloud config list --format 'value(core.project)') +gsutil ls gs://$PROJECT_ID +gsutil -m cp -r gs://$PROJECT_ID/* ~/ +chmod +x ~/guestbook-frontend/mvnw +chmod +x ~/guestbook-service/mvnw +gcloud services enable container.googleapis.com +gcloud services enable containerregistry.googleapis.com + +gcloud container clusters get-credentials guestbook-cluster --zone=us-central1-a +kubectl apply -f https://storage.googleapis.com/stackdriver-prometheus-documentation/rbac-setup.yml --as=admin --as-group=system:masters +export PROJECT_ID=$(gcloud config list --format 'value(core.project)') +curl -s https://storage.googleapis.com/stackdriver-prometheus-documentation/prometheus-service.yml | sed -e "s/\(\s*_kubernetes_cluster_name:*\).*/\1 'guestbook-cluster'/g" | sed -e "s/\(\s*_kubernetes_location:*\).*/\1 'us-central1'/g" | sed -e "s/\(\s*_stackdriver_project_id:*\).*/\1 '${PROJECT_ID}'/g" | kubectl apply -f - +kubectl get pods -n stackdriver +~/guestbook-frontend/pom.xml + + org.springframework.boot + spring-boot-starter-actuator + + + io.micrometer + micrometer-registry-prometheus + runtime + +@~/guestbook-frontend/src/main/resources/application.properties +management.server.port=8081 +management.endpoints.web.exposure.include=* +cd ~/guestbook-frontend +./mvnw clean compile jib:build +~/kubernetes/guestbook-frontend-deployment.yaml + +``` diff --git a/src/main/md/gcp/GCP_Images.md b/src/main/md/gcp/GCP_Images.md new file mode 100644 index 00000000..848f30d7 --- /dev/null +++ b/src/main/md/gcp/GCP_Images.md @@ -0,0 +1,51 @@ +{ + "archiveSizeBytes": "12301305600", + "creationTimestamp": "2019-12-16T09:52:31.658-08:00", + "description": "Debian, Debian GNU/Linux, 9 (stretch), amd64 built on 20191210", + "diskSizeGb": "10", + "family": "debian-9", + "guestOsFeatures": [ + { + "type": "VIRTIO_SCSI_MULTIQUEUE" + } + ], + "id": "4836861303305435824", + "kind": "compute#image", + "labelFingerprint": "42WmSpB8rSM=", + "licenseCodes": [ + "1000205" + ], + "licenses": [ + "projects/debian-cloud/global/licenses/debian-9-stretch" + ], + "name": "debian-9-stretch-v20191210", + "rawDisk": { + "containerType": "TAR", + "source": "" + }, + "selfLink": "projects/debian-cloud/global/images/debian-9-stretch-v20191210", + "sourceType": "RAW", + "status": "READY", + "storageLocations": [ + 
"eu", + "us", + "eu", + "eu", + "asia", + "us", + "eu", + "asia", + "asia", + "us", + "us", + "asia", + "eu", + "us", + "us", + "asia", + "eu", + "us", + "asia", + "asia" + ] +} \ No newline at end of file diff --git a/src/main/md/gcp/GCP_Most_useful_commands.md b/src/main/md/gcp/GCP_Most_useful_commands.md new file mode 100644 index 00000000..e8c22c4d --- /dev/null +++ b/src/main/md/gcp/GCP_Most_useful_commands.md @@ -0,0 +1,19 @@ +## Often useful commands +```bash +gcloud compute zones list +gcloud compute images list +gcloud compute instances list +gcloud config set compute/zone asia-southeast1-b +# --maintenance-policy=MIGRATE or Terminate +gcloud compute --project=qwiklabs-gcp-01-ecced1382d7b instances create instance-1 --zone=asia-southeast1-b --machine-type=f1-micro --subnet=default --network-tier=PREMIUM --service-account=881468636460-compute@developer.gserviceaccount.com --tags=http-server,https-server --image=ubuntu-1804-bionic-v20191211 --image-project=ubuntu-os-cloud --boot-disk-size=10GB --boot-disk-type=pd-standard --boot-disk-device-name=instance-1 --reservation-affinity=any --preemptible + +gcloud compute --project=qwiklabs-gcp-01-ecced1382d7b firewall-rules create default-allow-http --direction=INGRESS --priority=1000 --network=default --action=ALLOW --rules=tcp:80 --source-ranges=0.0.0.0/0 --target-tags=http-server + +gcloud compute --project=qwiklabs-gcp-01-ecced1382d7b firewall-rules create default-allow-https --direction=INGRESS --priority=1000 --network=default --action=ALLOW --rules=tcp:443 --source-ranges=0.0.0.0/0 --target-tags=https-server + +gcloud compute ssh instance-1 + +gcloud compute --project=gcp-nikias-data-search firewall-rules create tomcat --direction=INGRESS --priority=1000 --network=default --action=ALLOW --rules=tcp:8080 --source-ranges=0.0.0.0/0 --target-tags=tomcat + +``` + diff --git a/src/main/md/gcp/Gcp_commands_pluralsight_spring.md b/src/main/md/gcp/Gcp_commands_pluralsight_spring.md new file mode 100644 index 00000000..3fa056f9 --- /dev/null +++ b/src/main/md/gcp/Gcp_commands_pluralsight_spring.md @@ -0,0 +1,728 @@ + +## Basic commands +```bash +gcloud config list account +gcloud config list project +gcloud config set compute/zone asia-southeast1 +gcloud config list +``` + +## First lab (deploying simple application in GCP without db) +```bash +cd ~/ +git clone https://github.com/saturnism/spring-cloud-gcp-guestbook.git +cp -a ~/spring-cloud-gcp-guestbook/1-bootstrap/guestbook-service ~/guestbook-service +cd ~/guestbook-service +./mvnw -q spring-boot:run -Dserver.port=8081 +curl http://localhost:8081/guestbookMessages +curl -XPOST -H "content-type: application/json" -d '{"name": "Ray", "message": "Hello"}' http://localhost:8081/guestbookMessages +curl http://localhost:8081/guestbookMessages + +cp -a ~/spring-cloud-gcp-guestbook/1-bootstrap/guestbook-frontend ~/guestbook-frontend +curl -s http://localhost:8081/guestbookMessages +curl -s http://localhost:8081/guestbookMessages | jq -r '._embedded.guestbookMessages[] | {name: .name, message: .message}' +``` + +## second lab (deploying spring database application in GCP with Cloud SQL) +```bash +export PROJECT_ID=$(gcloud config list --format 'value(core.project)') +gsutil ls gs://$PROJECT_ID +gsutil -m cp -r gs://$PROJECT_ID/* ~/ +chmod +x ~/guestbook-frontend/mvnw +chmod +x ~/guestbook-service/mvnw +# Edit guestbook-service/pom.xml + + + + org.springframework.cloud + spring-cloud-gcp-dependencies + 1.2.0.RELEASE + pom + import + + + + + + org.springframework.cloud + 
spring-cloud-gcp-starter-sql-mysql + +Update guestbook-service/src/main/resources/application.properties +spring.cloud.gcp.sql.enabled=false +gcloud sql instances describe guestbook --format='value(connectionName)' +qwiklabs-gcp-4d0ab38f9ff2cc4c:us-central1:guestbook +application-cloud.properties +spring.cloud.gcp.sql.enabled=true +spring.cloud.gcp.sql.database-name=messages +spring.cloud.gcp.sql.instance-connection-name=YOUR_INSTANCE_CONNECTION_NAME +spring.datasource.hikari.maximum-pool-size=5 + +cd ~/guestbook-service +./mvnw spring-boot:run -Dserver.port=8081 -Dspring.profiles.active=cloud +curl -XPOST -H "content-type: application/json" -d '{"name": "Ray", "message": "Hello Cloud SQL"}' http://localhost:8081/guestbookMessages +curl http://localhost:8081/guestbookMessages +gcloud sql connect guestbook; +use messages; +select * from guestbook_message; + + +``` +## Enable cloud sql +```bash +gcloud services enable sqladmin.googleapis.com +gcloud services list | grep sqladmin +gcloud sql instances list +gcloud sql instances create guestbook --region=asia-southeast1 +gcloud sql databases create messages --instance guestbook +gcloud sql connect guestbook +show databases; +use messages; +CREATE TABLE guestbook_message ( + id BIGINT NOT NULL AUTO_INCREMENT, + name CHAR(128) NOT NULL, + message CHAR(255), + image_uri CHAR(255), + PRIMARY KEY (id) +); +exit +``` + +## Thrid lab (Cloud configuration) +```bash +export PROJECT_ID=$(gcloud config list --format 'value(core.project)') +gsutil ls gs://$PROJECT_ID +gsutil -m cp -r gs://$PROJECT_ID/* ~/ +chmod +x ~/guestbook-frontend/mvnw +chmod +x ~/guestbook-service/mvnw +@guestbook-frontend/pom.xml + + org.springframework.cloud + spring-cloud-gcp-starter-config + + + com.google.guava + guava + 20.0 + + + org.springframework.boot + spring-boot-starter-actuator + + +@guestbook-frontend/pom.xml + + + org.springframework.cloud + spring-cloud-gcp-dependencies + 1.2.0.RELEASE + pom + +@guestbook-frontend/pom.xml + + + spring-milestones + Spring Milestones + https://repo.spring.io/milestone + + false + + + + +guestbook-frontend/src/main/resources/bootstrap.properties +spring.cloud.gcp.config.enabled=false + +guestbook-frontend/src/main/resources/bootstrap-cloud.properties +spring.cloud.gcp.config.enabled=true +spring.cloud.gcp.config.name=frontend +spring.cloud.gcp.config.profile=cloud + +guestbook-frontend/src/main/resources/application.properties +management.endpoints.web.exposure.include=* + +@guestbook-frontend/src/main/java/com/example/frontend/FrontendController.java +import org.springframework.cloud.context.config.annotation.RefreshScope; + +@RefreshScope + +gcloud services enable runtimeconfig.googleapis.com +gcloud beta runtime-config configs create frontend_cloud +gcloud beta runtime-config configs variables set greeting "Hi from Runtime Config" --config-name frontend_cloud +gcloud beta runtime-config configs variables list --config-name=frontend_cloud +gcloud beta runtime-config configs variables get-value greeting --config-name=frontend_cloud + +cd ~/guestbook-service +./mvnw -q spring-boot:run -Dserver.port=8081 -Dspring.profiles.active=cloud +cd ~/guestbook-frontend +./mvnw spring-boot:run -Dspring.profiles.active=cloud + +gcloud beta runtime-config configs variables set greeting "Hi from Updated Config" --config-name frontend_cloud +curl -XPOST http://localhost:8080/actuator/refresh +curl http://localhost:8080/actuator/configprops | jq +``` + +## Fourth lab - Tracing, configuration management, and integration +```bash +export 
PROJECT_ID=$(gcloud config list --format 'value(core.project)') +gsutil ls gs://$PROJECT_ID +gsutil -m cp -r gs://$PROJECT_ID/* ~/ +chmod +x ~/guestbook-frontend/mvnw +chmod +x ~/guestbook-service/mvnw +gcloud services enable cloudtrace.googleapis.com +dependencies@~/guestbook-service/pom.xml + + org.springframework.cloud + spring-cloud-gcp-starter-trace + +dependencies@~/guestbook-frontend/pom.xml + + org.springframework.cloud + spring-cloud-gcp-starter-trace + +@guestbook-frontend/src/main/resources/application.properties +spring.cloud.gcp.trace.enabled=false +@guestbook-service/src/main/resources/application.properties +spring.cloud.gcp.trace.enabled=false +@guestbook-service/src/main/resources/application-cloud.properties +spring.cloud.gcp.trace.enabled=true +spring.sleuth.sampler.probability=1 +spring.sleuth.web.skipPattern=(^cleanup.*|.+favicon.*) +guestbook-frontend/src/main/resources/application-cloud.properties +spring.cloud.gcp.trace.enabled=true +spring.sleuth.sampler.probability=1 +spring.sleuth.web.skipPattern=(^cleanup.*|.+favicon.*) + +#Create a service account with permissions to propagate trace data to Stackdriver Trace +gcloud iam service-accounts create guestbook +export PROJECT_ID=$(gcloud config list --format 'value(core.project)') +gcloud projects add-iam-policy-binding ${PROJECT_ID} --member serviceAccount:guestbook@${PROJECT_ID}.iam.gserviceaccount.com --role roles/editor + +#Generate the JSON key file to be used by the application to identify itself using the service account +gcloud iam service-accounts keys create ~/service-account.json --iam-account guestbook@${PROJECT_ID}.iam.gserviceaccount.com +# Above command creates service account credentials that are stored in the $HOME/service-account.json file. Treat the service-account.json file as your own username/password + +cd ~/guestbook-service +./mvnw spring-boot:run -Dserver.port=8081 -Dspring.profiles.active=cloud -Dspring.cloud.gcp.credentials.location=file:///$HOME/service-account.json +cd ~/guestbook-frontend +./mvnw spring-boot:run -Dspring.profiles.active=cloud -Dspring.cloud.gcp.credentials.location=file:///$HOME/service-account.json +curl http://localhost:8080 +Gconsole > Stackdriver > Trace > Trace List. 
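+# Sketch (not part of the original lab): if the Trace list looks empty, confirm the
+# Trace API is really enabled and generate a little traffic so spans have something to show.
+# Assumes the frontend is still running locally on port 8080.
+gcloud services list --enabled | grep cloudtrace.googleapis.com
+for i in $(seq 1 20); do curl -s http://localhost:8080 > /dev/null; done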
+``` + +## Fifth lab -message handling service with Cloud Pub/Sub +```bash +export PROJECT_ID=$(gcloud config list --format 'value(core.project)') +gsutil ls gs://$PROJECT_ID +gsutil -m cp -r gs://$PROJECT_ID/* ~/ +chmod +x ~/guestbook-frontend/mvnw +chmod +x ~/guestbook-service/mvnw +gcloud services enable pubsub.googleapis.com +gcloud pubsub topics create messages + +@~/guestbook-frontend/pom.xml + + org.springframework.cloud + spring-cloud-gcp-starter-pubsub + +@guestbook-frontend/src/main/java/com/example/frontend/FrontendController.java +import org.springframework.cloud.gcp.pubsub.core.*; +@Autowired +private PubSubTemplate pubSubTemplate; +# /** Add inside the if statement to process messages that aren't null or empty, just above the comment Post **/ + +pubSubTemplate.publish("messages", name + ": " + message); + +cd ~/guestbook-service +./mvnw -q spring-boot:run -Dserver.port=8081 -Dspring.profiles.active=cloud + +cd ~/guestbook-frontend +./mvnw spring-boot:run -Dspring.profiles.active=cloud +gcloud pubsub subscriptions list +gcloud pubsub subscriptions create messages-subscription-1 --topic=messages +//Post message using UI and run below command +gcloud pubsub subscriptions pull messages-subscription-1 +//Post message using UI and run below command, but below one would remove +gcloud pubsub subscriptions pull messages-subscription-1 --auto-ack +``` + +## Fifth lab (part-2) Process messages in subscription +```bash +cd ~ +curl https://start.spring.io/starter.tgz -d dependencies=cloud-gcp-pubsub -d baseDir=message-processor | tar -xzvf - +@~/message-processor/pom.xml + + + org.springframework.cloud + spring-cloud-gcp-starter-pubsub + + + org.springframework.boot + spring-boot-starter-test + test + + +@~/message-processor/src/main/java/com/example/demo/DemoApplication.java +import org.springframework.context.annotation.Bean; +import org.springframework.boot.CommandLineRunner; +import org.springframework.cloud.gcp.pubsub.core.*; + + + @Bean + public CommandLineRunner cli(PubSubTemplate pubSubTemplate) { + return (args) -> { + pubSubTemplate.subscribe("messages-subscription-1", + (msg, ackConsumer) -> { + System.out.println(msg.getData().toStringUtf8()); + ackConsumer.ack(); + }); + }; + } +cd ~/message-processor +./mvnw -q spring-boot:run +//Open the browser with the frontend application, and post a few messages. 
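+# Sketch (an assumption, not in the original lab): a test message can also be published
+# straight from the CLI instead of the browser, using the existing "messages" topic.
+gcloud pubsub topics publish messages --message="Hello from gcloud"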
+//Verify that the Cloud Pub/Sub messages are received in the message processor +``` + + +## Sixt lab - Spring Integration that abstracts from the underlying messaging system (rather integration with Cloud Pub/Sub) +```bash +export PROJECT_ID=$(gcloud config list --format 'value(core.project)') +gsutil ls gs://$PROJECT_ID +gsutil -m cp -r gs://$PROJECT_ID/* ~/ +chmod +x ~/guestbook-frontend/mvnw +chmod +x ~/guestbook-service/mvnw +gcloud pubsub topics list +dependencies@~/guestbook-frontend/pom.xml + + org.springframework.integration + spring-integration-core + +OutboundGateway.java@~/guestbook-frontend/src/main/java/com/example/frontend +package com.example.frontend; + +import org.springframework.integration.annotation.MessagingGateway; + +@MessagingGateway(defaultRequestChannel = "messagesOutputChannel") +public interface OutboundGateway { + void publishMessage(String message); +} +@~/guestbook-frontend/src/main/java/com/example/frontend/FrontendController.java + @Autowired + private PubSubTemplate pubSubTemplate; //remove this + @Autowired + private OutboundGateway outboundGateway; //add this + outboundGateway.publishMessage(name + ": " + message); //instead of pubSubTemplate.publish("messages", name + ": " + message); + +@~/guestbook-frontend/src/main/java/com/example/frontend/FrontendApplication.java + +import org.springframework.context.annotation.*; +import org.springframework.cloud.gcp.pubsub.core.*; +import org.springframework.cloud.gcp.pubsub.integration.outbound.*; +import org.springframework.integration.annotation.*; +import org.springframework.messaging.*; + + @Bean + @ServiceActivator(inputChannel = "messagesOutputChannel") + public MessageHandler messageSender(PubSubTemplate pubsubTemplate) { + return new PubSubMessageHandler(pubsubTemplate, "messages"); + } + +cd ~/guestbook-service +./mvnw -q spring-boot:run -Dserver.port=8081 -Dspring.profiles.active=cloud +cd ~/guestbook-frontend +./mvnw spring-boot:run -Dspring.profiles.active=cloud +gcloud pubsub subscriptions pull messages-subscription-1 --auto-ack +gcloud pubsub subscriptions list +``` + +* Spring Integration for Cloud Pub/Sub works for both inbound messages and outbound messages. +* Cloud Pub/Sub also supports Spring Cloud Stream to create reactive microservices. + +## Seventh lab - Add the ability to upload an image associated with a message. You store the image in Cloud Storage. +```bash +export PROJECT_ID=$(gcloud config list --format 'value(core.project)') +gsutil ls gs://$PROJECT_ID +gsutil -m cp -r gs://$PROJECT_ID/* ~/ +chmod +x ~/guestbook-frontend/mvnw +chmod +x ~/guestbook-service/mvnw +dependencies@~/guestbook-frontend/pom.xml + + org.springframework.cloud + spring-cloud-gcp-starter-storage + +~/guestbook-frontend/src/main/resources/templates/index.html +
(remove) + + + +Insert the following tags before the tag: + + File: + +@~/guestbook-frontend/src/main/java/com/example/frontend/FrontendController.java + +import org.springframework.cloud.gcp.core.GcpProjectIdProvider; +import org.springframework.web.multipart.MultipartFile; +import org.springframework.context.ApplicationContext; +import org.springframework.core.io.Resource; +import org.springframework.core.io.WritableResource; +import org.springframework.util.StreamUtils; +import java.io.*; + +//Insert the following code near the beginning of the FrontendController class definition, immediately before @GetMapping("/"). + // The ApplicationContext is needed to create a new Resource. + @Autowired + private ApplicationContext context; + // Get the Project ID, as its Cloud Storage bucket name here + @Autowired + private GcpProjectIdProvider projectIdProvider; + +Change the definition for the post method +You modify the post method to save uploaded images to Cloud Storage. +1. Near the end of the file, change this line: public String post(@RequestParam String name, @RequestParam String message, Model model) { +To these lines: + public String post( + @RequestParam(name="file", required=false) MultipartFile file, + @RequestParam String name, + @RequestParam String message, Model model) + throws IOException { + +2. Insert the following code immediately after the line model.addAttribute("name", name);: + String filename = null; + if (file != null && !file.isEmpty() + && file.getContentType().equals("image/jpeg")) { + // Bucket ID is our Project ID + String bucket = "gs://" + + projectIdProvider.getProjectId(); + // Generate a random file name + filename = UUID.randomUUID().toString() + ".jpg"; + WritableResource resource = (WritableResource) + context.getResource(bucket + "/" + filename); + // Write the file to Cloud Storage + try (OutputStream os = resource.getOutputStream()) { + os.write(file.getBytes()); + } + } +3. Add the following code to insert the location of the uploaded file immediately before the client.add(payload); line: + // Store the generated file name in the database + payload.put("imageUri", filename); +The complete post method definition should look like the screenshot: + +cd ~/guestbook-service +./mvnw -q spring-boot:run -Dserver.port=8081 -Dspring.profiles.active=cloud +cd ~/guestbook-frontend +./mvnw spring-boot:run -Dspring.profiles.active=cloud + +@~/guestbook-frontend/src/main/java/com/example/frontend/FrontendController.java. 
+import org.springframework.http.*; +// ".+" is necessary to capture URI with filename extension + @GetMapping("/image/{filename:.+}") + public ResponseEntity file(@PathVariable String filename) { + String bucket = "gs://" + projectIdProvider.getProjectId(); + // Use "gs://" URI to construct + // a Spring Resource object + Resource image = context.getResource(bucket + "/" + filename); + // Send it back to the client + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.IMAGE_JPEG); + return new ResponseEntity<>(image, headers, HttpStatus.OK); + } + +main.container@~/guestbook-frontend/src/main/resources/templates/index.html +image +cd ~/guestbook-frontend +./mvnw spring-boot:run -Dspring.profiles.active=cloud + +``` + +## Eigth lab - Vision cloud API +```bash +export PROJECT_ID=$(gcloud config list --format 'value(core.project)') +gsutil ls gs://$PROJECT_ID +gsutil -m cp -r gs://$PROJECT_ID/* ~/ +chmod +x ~/guestbook-frontend/mvnw +chmod +x ~/guestbook-service/mvnw +gcloud services enable vision.googleapis.com + +dependencies@~/guestbook-frontend/pom.xml + + com.google.cloud + google-cloud-vision + + +@~/guestbook-frontend/src/main/resources/application.properties +spring.cloud.gcp.credentials.scopes=https://www.googleapis.com/auth/cloud-platform + +@~/guestbook-frontend/src/main/java/com/example/frontend/FrontendApplication.java +import java.io.IOException; +import com.google.cloud.vision.v1.*; +import com.google.api.gax.core.CredentialsProvider; + + + // This configures the Vision API settings with a + // credential using the the scope we specified in + // the application.properties. + @Bean + public ImageAnnotatorSettings imageAnnotatorSettings(CredentialsProvider credentialsProvider) throws IOException { + return ImageAnnotatorSettings.newBuilder() + .setCredentialsProvider(credentialsProvider).build(); + } + + @Bean + public ImageAnnotatorClient imageAnnotatorClient( + ImageAnnotatorSettings settings) + throws IOException { + return ImageAnnotatorClient.create(settings); + } + +@~/guestbook-frontend/src/main/java/com/example/frontend/FrontendController.java +import com.google.cloud.vision.v1.*; + + @Autowired +private ImageAnnotatorClient annotatorClient; + +private void analyzeImage(String uri) { + // After the image was written to GCS + // analyze it with the GCS URI.It,s also + // possible to analyze an image embedded in + // the request as a Base64 encoded payload. // + List requests = new ArrayList<>(); + ImageSource imgSrc = ImageSource.newBuilder() + .setGcsImageUri(uri).build(); + Image img = Image.newBuilder().setSource(imgSrc).build(); + Feature feature = Feature.newBuilder() + .setType(Feature.Type.LABEL_DETECTION).build(); + AnnotateImageRequest request = AnnotateImageRequest + .newBuilder() + .addFeatures(feature) + .setImage(img) + .build(); + requests.add(request); + BatchAnnotateImagesResponse responses = + annotatorClient.batchAnnotateImages(requests); + // We send in one image, expecting just + // one response in batch + AnnotateImageResponse response =responses.getResponses(0); + System.out.println(response); +} + +@~/guestbook-frontend/src/main/java/com/example/frontend/FrontendController.java + // After written to GCS, analyze the image. 
+ analyzeImage(bucket + "/" + filename); + +export PROJECT_ID=$(gcloud config list --format 'value(core.project)') +gcloud iam service-accounts create guestbook +gcloud projects add-iam-policy-binding ${PROJECT_ID} --member serviceAccount:guestbook@${PROJECT_ID}.iam.gserviceaccount.com --role roles/editor +gcloud iam service-accounts keys create ~/service-account.json --iam-account guestbook@${PROJECT_ID}.iam.gserviceaccount.com + +cd ~/guestbook-service +./mvnw -q spring-boot:run -Dserver.port=8081 -Dspring.profiles.active=cloud +cd ~/guestbook-frontend +./mvnw spring-boot:run -Dspring.profiles.active=cloud -Dspring.cloud.gcp.credentials.location=file:///$HOME/service-account.json + +``` + +## Ninth lab - Deploying to APP Engine +```bash +gcloud config set compute/zone asia-southeast1 +gcloud app create --region=asia-southeast1 +export PROJECT_ID=$(gcloud config list --format 'value(core.project)') +gsutil ls gs://$PROJECT_ID +gsutil -m cp -r gs://$PROJECT_ID/* ~/ +chmod +x ~/guestbook-frontend/mvnw +chmod +x ~/guestbook-service/mvnw +#gcloud app create --region=asia-southeast1 +gcloud app create --region=us-central + +dependencies@~/guestbook-frontend/pom.xml + + com.google.cloud.tools + appengine-maven-plugin + 1.3.1 + + 1 + + + +mkdir -p ~/guestbook-frontend/src/main/webapp/WEB-INF/ +appengine-web.xml@~/guestbook-frontend/src/main/webapp/WEB-INF/ + + default + 1 + true + java8 + B4_1G + true + + 2 + + + + + + +gcloud beta runtime-config configs variables set messages.endpoint "https://guestbook-service-dot-${PROJECT_ID}.appspot.com/guestbookMessages" --config-name frontend_cloud +cd ~/guestbook-frontend +./mvnw appengine:deploy -DskipTests +gcloud app browse + +~/guestbook-service/pom.xml + + com.google.cloud.tools + appengine-maven-plugin + 1.3.1 + + 1 + + +mkdir -p ~/guestbook-service/src/main/webapp/WEB-INF/ + +appengine-web.xml@~/guestbook-service/src/main/webapp/WEB-INF/ + + guestbook-service + 1 + true + java8 + B4_1G + + 2 + + + + + + +cd ~/guestbook-service +./mvnw appengine:deploy -DskipTests +gcloud app browse -s guestbook-service +``` + + +## Tenth lab - Stackdriver debugger appengine +```bash +export PROJECT_ID=$(gcloud config list --format 'value(core.project)') +gsutil ls gs://$PROJECT_ID +gsutil -m cp -r gs://$PROJECT_ID/* ~/ +chmod +x ~/guestbook-frontend/mvnw +chmod +x ~/guestbook-service/mvnw +gcloud app browse + +Stackdriver > Logging > Logs +GAE Application > Default Service > 1 +Stackdriver > Debug + +gcloud beta debug source upload --project=qwiklabs-gcp-00-f82b10a6fe39 --branch=64CC16049CF564197B0F LOCAL_PATH + +gcloud services enable sourcerepo.googleapis.com +gcloud source repos create google-source-captures +cd ~/guestbook-frontend +git config --global user.email $(gcloud config get-value core/account) +git config --global user.name "devstar" +gcloud beta debug source upload --project=$PROJECT_ID --branch=64CC16049CF564197B0F ./src/ + +Stackdriver > Monitoring +Create Workspace +Skip AWS Setup +Resources > GCP > App Engine +``` + +## Elevent lab - Cloud Spanner database +```bash +export PROJECT_ID=$(gcloud config list --format 'value(core.project)') +gsutil ls gs://$PROJECT_ID +gsutil -m cp -r gs://$PROJECT_ID/* ~/ +chmod +x ~/guestbook-frontend/mvnw +chmod +x ~/guestbook-service/mvnw +gcloud app browse +gcloud services enable spanner.googleapis.com +gcloud spanner instances create guestbook --config=regional-us-central1 --nodes=1 --description="Guestbook messages" +gcloud spanner databases create messages --instance=guestbook + +cd ~/guestbook-service +mkdir 
db +spanner.ddl@~/guestbook-service/db/ +CREATE TABLE guestbook_message ( + id STRING(36) NOT NULL, + name STRING(255) NOT NULL, + image_uri STRING(255), + message STRING(255) +) PRIMARY KEY (id); +gcloud spanner databases ddl update messages --instance=guestbook --ddl="$( Storage >__ Spanner._ > Guestbook messages > guestbook_message > Data + +pom.xml@~/guestbook-service/pom.xml + + org.springframework.cloud + spring-cloud-gcp-starter-data-spanner + + +~/guestbook-service/src/main/resources/application-cloud.properties +spring.cloud.gcp.sql.enabled=true +spring.cloud.gcp.sql.database-name=messages +spring.cloud.gcp.sql.instance-connection-name=... +spring.cloud.gcp.spanner.instance-id=guestbook +spring.cloud.gcp.spanner.database=messages + + +~/guestbook-service/src/main/java/com/example/guestbook/GuestbookMessage.java + +package com.example.guestbook; + +import lombok.*; +import org.springframework.cloud.gcp.data.spanner.core.mapping.*; +import org.springframework.data.annotation.Id; + +@Data +@Table(name = "guestbook_message") +public class GuestbookMessage { + @PrimaryKey + @Id + private String id; + + private String name; + + private String message; + + @Column(name = "image_uri") + private String imageUri; + + public GuestbookMessage() { + this.id = java.util.UUID.randomUUID().toString(); + } +} + +~/guestbook-service/src/main/java/com/example/guestbook/GuestbookMessageRepository.java + +public interface GuestbookMessageRepository extends PagingAndSortingRepository { + java.util.List findByName(String name); +} +cd ~/guestbook-service +./mvnw spring-boot:run -Dserver.port=8081 -Dspring.profiles.active=cloud +curl -XPOST -H "content-type: application/json" -d '{"name": "Ray", "message": "Hello Cloud Spanner"}' http://localhost:8081/guestbookMessages +curl http://localhost:8081/guestbookMessages +curl http://localhost:8081/guestbookMessages/search/findByName?name=Ray +gcloud spanner databases execute-sql messages --instance=guestbook --sql="SELECT * FROM guestbook_message WHERE name = 'Ray'" +Spanner > Guestbook messages > messages > guestbook_message > data +cd ~/guestbook-service +./mvnw clean appengine:deploy -DskipTests +gcloud app browse -s guestbook-service +gcloud app browse -s default +``` + +## Google app-engine commands +```bash +gcloud app logs tail -s default +gcloud app browse +``` + +## Reference +* [Cloud shell](https://cloud.google.com/shell/docs/) +* [Spring cloud GCP samples](https://github.com/spring-cloud/spring-cloud-gcp/tree/master/spring-cloud-gcp-samples/spring-cloud-gcp-config-sample) +* [Regions and Zones](https://cloud.google.com/compute/docs/regions-zones/) +* [Spring guestbook app](https://github.com/saturnism/spring-cloud-gcp-guestbook) + + +## Follow-up? + +* How to import structured data stored on Cloud Storage directly into BigQuery for ad hoc data analytics using standard SQL? 
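+* A minimal sketch for that follow-up (the dataset, table, and file names below are placeholders, not from the labs): load a CSV from Cloud Storage into BigQuery with `bq`, then query it with standard SQL.
+
+```bash
+bq mk guestbook_analytics
+bq load --autodetect --source_format=CSV guestbook_analytics.messages gs://$PROJECT_ID/data/messages.csv
+bq query --use_legacy_sql=false 'SELECT name, COUNT(*) AS messages FROM guestbook_analytics.messages GROUP BY name'
+```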
\ No newline at end of file diff --git a/src/main/md/git/git_reference.md b/src/main/md/git/git_reference.md new file mode 100644 index 00000000..b34cc422 --- /dev/null +++ b/src/main/md/git/git_reference.md @@ -0,0 +1,66 @@ +# Find diff between local commit with origin +git diff origin/master -- + +# TO list the changes with remote branch before commiting +git diff origin/branch -- + + +# Create patch with origin +git format-patch origin/master + +# Create patch for SHA +git format-patch -1 SHA1 + +git diff SHA1 SHA2 > diff_patch.patch + +# Reset my local branch exactly like Origin +git fetch origin +git reset --hard origin/master + +# Git apply patch locally +git apply --stat fix_empty_poster.patch (test) +git apply --check fix_empty_poster.patch (check) +git am --signoff < fix_empty_poster.patch + +# List commit ids +git log | grep commit | gawk '{print $2}' + +# List commit done on weekdays alone +git log | grep Date | grep -v "Sun" | grep -v "Sat" + +# Git overwrite one file +git fetch +git checkout origin/master + +Find lat n commit by author + +# Git RCA commands +* To find how a commit ended up in a revision +```bash +git tag --contains shacommit + +git log .. --name-only --format="%aN <%aE>" --reverse + +git log tag1..tag2 +git diff tag1 tag2 --stat +git diff tag1 tag2 -- some/file/name +``` + +# Git Merging +```bash +git log -n 2 --author=Mohan --pretty=format:%H | tac +git checkout branch (to merge content added in HEAD +git cherry-pick 6db4270dec5d0035430683c399c6c1340cc83c67 +git cherry-pick 2bbfedb7998608298e669c0b4989f3225c4ab79e +``` + + +# Handling git line feed +Eclispe > Window > Preferences > General > Workspace > New File line Delimiter > Unix +Eclipse Preferences / Team / Git / Configuration / User Settings @ Core Section - key: autocrlf value: false +git config --system core.whitespace cr-at-eol +git config --system core.autocrlf false + +# Reference +* [Tomy's reference](https://gist.github.com/mohanmca/d405dd27fdfa92b51975) +* [Ashish Rawat's reference](https://gist.github.com/eashish93/3eca6a90fef1ea6e586b7ec211ff72a5) \ No newline at end of file diff --git a/src/main/md/golang/how_to_learn_golang.md b/src/main/md/golang/how_to_learn_golang.md new file mode 100644 index 00000000..c74c1837 --- /dev/null +++ b/src/main/md/golang/how_to_learn_golang.md @@ -0,0 +1,8 @@ +* [Tour of Go](https://tour.golang.org/welcome/1) +* [Go by Example](https://gobyexample.com) +* [Effective Go](https://golang.org/doc/effective_go.html) +* [Go naming tips](https://peter.bourgon.org/blog/2019/04/24/go-naming-tips.html) +* [Clear is better than clever](https://dave.cheney.net/2019/07/09/clear-is-better-than-clever) + +## Channel +* [Gophers](https://gophers.slack.com/?redir=%2Fmessages%2Fgeneral%2F) diff --git a/src/main/md/high_paid_jobs.md b/src/main/md/high_paid_jobs.md index 6091a2e1..d439cc04 100644 --- a/src/main/md/high_paid_jobs.md +++ b/src/main/md/high_paid_jobs.md @@ -5,5 +5,16 @@ * A programmer wants to judge himself/herself by a single language then they're already replaceable * Being subject-matter expert in a area where industry has highest demand would be a boon. (Ex: Machine learning expert in 2015~2027) +# Pay Negotiations +* What would it take for me to earn a raise in the future? +* Rather than directly asking for more money, Pose the question about where I fell in the spectrum of other workers? + +# Against Counter Offer +* Between 75% to 90% of people who accept a counteroffer leave the company within a year anyway (source???) 
+* The circumstances that caused you to look in the first place will not change if you take a counteroffer, the circumstances will just repeat in the future. + +# To Counter Offer +* Recruiters NEVER benefit from an accepted counteroffer. + References * StackOverflow (https://insights.stackoverflow.com/survey/2017#top-paying-technologies) \ No newline at end of file diff --git a/src/main/md/img/microservices_architecture_2.png b/src/main/md/img/microservices_architecture_2.png new file mode 100644 index 00000000..58b2de9e Binary files /dev/null and b/src/main/md/img/microservices_architecture_2.png differ diff --git a/src/main/md/img/ycombinator.svg b/src/main/md/img/ycombinator.svg new file mode 100644 index 00000000..9c8c8e00 --- /dev/null +++ b/src/main/md/img/ycombinator.svg @@ -0,0 +1,40 @@ + +{\displaystyle {\textsf {Y}}=\lambda f.(\lambda x.f\ (x\ x))\ (\lambda x.f\ (x\ x))} + + + \ No newline at end of file diff --git a/src/main/md/improve_programming.md b/src/main/md/improve_programming.md deleted file mode 100644 index de582938..00000000 --- a/src/main/md/improve_programming.md +++ /dev/null @@ -1,4 +0,0 @@ -* Working with people far smarter than myself -* Always listening to what others have to say, regardless if they're junior, intermediate, senior or guru. job title doesn't mean anything. -* Half of everything you know will be obsolete in 18-24 months -* \ No newline at end of file diff --git a/src/main/md/markets_banking/CollateralManagement.md b/src/main/md/markets_banking/CollateralManagement.md new file mode 100644 index 00000000..97563a6f --- /dev/null +++ b/src/main/md/markets_banking/CollateralManagement.md @@ -0,0 +1,131 @@ +* Collateral is one of the way to mitigate the credit risk +* Collateral - Something a borrower is expected to put up to indemnify the lender against price movements or counterparty risks. 
(Example: Cash, Equities or Bonds) +* Credit risk exists when counterparty has an obligation to make payments or deliveries in future +* First leg - Initiation of Collateral Agreement + * Borrower offers securities as collateral + * Lender - lends cash +* Second leg - Termination of Collateral Agreement + * Borrower - returns cash + interest + * Lender - hands over collateralized securities +* Collateral Management + * Agreement setup + * Collateral allocation + * Margin Call & Margin Management + * Confirmations + * Inventory management + * Dispute management + * Data management + * Portfolio reoncilliations + * Interest +* OTC derivatives should be centrally cleared and reported using CCP (Central counter party) +* Collateral advantages + * Reduced credit risk + * Capital savings (risk weighted assets) + * Access to higher risk trades + * More efficient trading between counterparties + * More STP transactions without hassle of credit review and settlements +* Collateral disadvantages + * Increases operational risk - false sense of security + * Increases the contract cost + * Legal procedure, Perfection risk, review risk, enforcement risk, priority risk, recharacterisation risk + * Settlement risk, Liquidity risk + * Concentration risk + * Can reduce trading activity + * Pricing risk and model risk + * Expensive +* Collateral Eligibility + * Liquid (High liquid and marketable) + * Easy to settle (Treasury bonds, AAA corporate bonds, large-cap equities, and many mortgage-backed bonds) + * High quality (Investment grade - S&P AAA-BBB, Moddy's Aaa-Baa) +* Collateral currency priority - USD, EUR, GBP, JPY and others +* As of 2013, Bonds that were collateral + * US Govt bonds - 33% + * JPY Govt bonds - 22% + * EUR Govt bonds - 19% +* As of 2013, Most prefered collateral + * Govt bonds - 25% + * Corporate bonds - 27% + * Equities - 25% + * LC - 5% +* Enterprise view of collateral: ![Enterprise view of collateral][ev] +* Exposure management + * Derived from Collateral agreement + * Trade recognition, Trades allocation to agreements, exposure netting across portfolio + * Who collects dividend of the equities that was given as Collateral + * Who pays tax for dividend? 
Should we hand over the equity or substitute another security to avoid the tax?
+* Collateral Management
+  * Only trades that carry credit risk require collateral
+  * When to place a margin call is defined in the agreements
+  * Netting across the portfolio for a counterparty may be allowed
+* Collateral
+  * May be deposited with a 3rd-party custodian
+  * Cleared by CCP
+* Agreements
+  * Master agreement - ISDA
+  * CSA
+  * GMRA for repo, MSFTA, reinsurance agreements
+* Collateral management front to bank: ![Collateral management front to bank][cmf2b]
+* CSA
+  * Margin call - A request to post additional collateral
+  * Initial Margin
+  * Variation Margin - when the underlying trade value changes, either party may have to pay to cover the other side's gain
+  * Mark to Market - CCP does this
+  * Mark to Model - Finding intrinsic value using some model (e.g. Black-Scholes) to determine whether we are ITM/OTM
+  * Haircut - reduce the value from the mark to market by a pre-agreed %, i.e. discount the value of the underlying asset
+  * Threshold Amount - Amount of credit risk the parties are ready to accept
+  * Substitution - Replace collateral with other collateral (e.g. to avoid dividend tax)
+  * Rehypothecation - Allowing posted collateral to be re-used in the market, but it must be bought back and returned later
+  * CCP - (Hypothecation) Buyer for seller, seller for buyer
+  * Netting
+  * Minimum transfer amount - Smallest collateral transfer that will be made
+  * Independent Amount - The amount of collateral required over and above the mark to market of a portfolio. It is designed to cater for changes in the market value of a portfolio between margin calls
+  * Valuations and Timings
+    * Valuation Agent, Date, Time and Notification Time
+  * Termination Events
+    * Illegality, Tax Event, Tax Event upon Merger, Credit Event Upon Merger
+* What if there is a dispute?
+  * What is disputed?
+  * What is not disputed should be settled
+* Why CCP?
+ * When there are multiple banks, when one bank goes bankrupt, there could be dominos effect + * CCP should be the central counter party and dominos effect is avoided + * CCP capitalized to withstand dealer default + * All collateral moves to/from CCP +* When there are multiple asset class as collateral, how to utilize all sorts of asset classes effectively + * Cheapest to deliver (Try to use cheapest to deliver to borrow) + * Hardest to place (Try to use hardest place to borrow) + * Hardest to please (Try to use hardest please to borrow) +* Collateral management optimization: ![Collateral management optimization][cmo] +* Collateral management IT: ![Collateral management IT Architecture][cmit] + + + +# Stock borrowing and lending +* Fee or Rebate + * The cost to borrow/lend a financial instrument +* Borrow gets a quote for borrowing using broker +* Broker mediates and gets a quote from multiple lender +* Stock borrowing quote: ![Stock borrowing quote][sblq] + + + +[ev]: img/EnterpriseCM.PNG "Enterprise View" +[cmf2b]: img/CMF2B.PNG "Collateral Management Front to Bank" +[cmo]: img/CollateralOptimization.PNG "Collateral Optimization" +[cmit]: img/CMIT.PNG "Collateral IT Architecture" +[sblq]: img/SBL_Quote.PNG "Stock borrowing" + +# References +* [Collateral Management Part 1 Basics] (https://www.youtube.com/watch?v=zt4WQqu1qqQ) +* [Collateral Management Part 3](https://www.youtube.com/watch?v=MmSVQjQQFgA) +* [SBL 101](https://www.youtube.com/watch?v=OAPOqh3v1Ak) +* [SBL 102](https://www.youtube.com/watch?v=tKvATCGFlY0) +* [Investment Grade](https://www.google.com/search?tbm=isch&q=investment+grade) +* [Bond Rating](https://www.google.com/search?tbm=isch&q=bond+rating) +* [Calypso Workflows & Messaging](http://www2.calypso.com/Portals/0/Documents/Brochures/Calypso-Collateral-Management.pdf) +* [AcadiaSoft Hub](http://www.acadiasoft.com/wp-content/uploads/2017/04/Protocoll-Fact-Sheet-Final.pdf) +* AcadiaSoft, Inc. is a provider of margin automation for counterparties engaged in collateral management worldwide. 
+* (Portfolio reconciliation)[https://www.trioptima.com/triresolve/portfolio-reconciliation/] +* (UMR|Margin for non-cleared OTC derivatives)[https://www.ey.com/Publication/vwLUAssets/ey-margin-policy-for-non-cleared-otc-derivatives/$FILE/ey-margin-policy-for-non-cleared-otc-derivatives.pdf] +* (Uncleared Margin Rules)[https://www.bnymellon.com/_global-assets/pdf/markets-group/adapting-to-the-new-uncleared-margin-rules-global.pdf] +* (Uncleared margin rules in EU and US)[https://www.fieldfisher.com/media/5054275/margin-rules-12-16-v4.pdf] \ No newline at end of file diff --git a/src/main/md/markets_banking/Rehypothecation.md b/src/main/md/markets_banking/Rehypothecation.md new file mode 100644 index 00000000..9358f686 --- /dev/null +++ b/src/main/md/markets_banking/Rehypothecation.md @@ -0,0 +1,24 @@ +## Hypothecation +* Hypothecation occurs when an asset is pledged as collateral to secure loan + * Title and ownership still lies with borrower + * Ownership would be transferred if terms of agreement were not met by borrower + * When security buyers buy on margin or sell-short, they agree securities can be sold if necessary if there is a margin call + +## Rehypothecation +* Banks would make use of client's asset that was posted as Collateral +* Clients should permit to make use of their collateral to be used as Rehypothecatable +* Examples: + * Hedge fund posted its asset as Collateral to prime-broker, that asset being used by PM for its own transaction + + +## Margin account +* It is a brokerage account, where broker *lends* money for customer to buy securities + * The loan in the account is collateralized by securities purchased and cash. + * It has interest rate for the loan that was exercized. + * Margin accounts are used as source of fund using existing securities + +## Rehypothecation risks +* What happens if Banks that rehypothecated client's asset (or broker) default, and client would end-up in ITM? 
+* + +* [Rehypothecation](https://www.investopedia.com/terms/r/rehypothecation.asp) \ No newline at end of file diff --git a/src/main/md/markets_banking/img/CMF2B.PNG b/src/main/md/markets_banking/img/CMF2B.PNG new file mode 100644 index 00000000..d1b8141e Binary files /dev/null and b/src/main/md/markets_banking/img/CMF2B.PNG differ diff --git a/src/main/md/markets_banking/img/CMIT.PNG b/src/main/md/markets_banking/img/CMIT.PNG new file mode 100644 index 00000000..35debb57 Binary files /dev/null and b/src/main/md/markets_banking/img/CMIT.PNG differ diff --git a/src/main/md/markets_banking/img/CollateralOptimization.PNG b/src/main/md/markets_banking/img/CollateralOptimization.PNG new file mode 100644 index 00000000..2645f774 Binary files /dev/null and b/src/main/md/markets_banking/img/CollateralOptimization.PNG differ diff --git a/src/main/md/markets_banking/img/EnterpriseCM.PNG b/src/main/md/markets_banking/img/EnterpriseCM.PNG new file mode 100644 index 00000000..9fcddb2a Binary files /dev/null and b/src/main/md/markets_banking/img/EnterpriseCM.PNG differ diff --git a/src/main/md/markets_banking/img/SBL_Quote.PNG b/src/main/md/markets_banking/img/SBL_Quote.PNG new file mode 100644 index 00000000..767d5d2f Binary files /dev/null and b/src/main/md/markets_banking/img/SBL_Quote.PNG differ diff --git a/src/main/md/markets_banking/possible_reports.md b/src/main/md/markets_banking/possible_reports.md new file mode 100644 index 00000000..3f1d0a7a --- /dev/null +++ b/src/main/md/markets_banking/possible_reports.md @@ -0,0 +1,2 @@ +* Top 20 product that are traded and draw distribution - 1 diagram +* Top 20 product that are traded for top 10 counterparties - 20 diagram \ No newline at end of file diff --git a/src/main/md/massively_distributed/Operation_Patterns_SRE.md b/src/main/md/massively_distributed/Operation_Patterns_SRE.md new file mode 100644 index 00000000..c35e3958 --- /dev/null +++ b/src/main/md/massively_distributed/Operation_Patterns_SRE.md @@ -0,0 +1,25 @@ +## SLO and Error budget + +* Error budget to determine acceptable risk and make informed decisions about when changes should be made. The error budget is a limit on how much time the system is allowed to be down, defined by the contracted service-level agreement (SLA) or the intended service-level objective (SLO). Many clients stop new releases when they might miss the service-level agreement (SLA). Error budget goes a step further and encourages testing and releasing only if downtime is left in the SLA. + +* Tools for operational metrics + * Prometheus (Event and alert monitoring System) + * Prometheus is not intended as a dashboarding solution. Although it can be used to graph specific queries, it is not a full-fledged dashboarding solution and needs to be hooked up with Grafana to generate dashboards; this has been cited as a disadvantage due to the additional setup complexity + * Data storage format - The ability to specify an arbitrary list of labels and to query based on these in real time is why Prometheus' data model is called multi-dimensional. + * Prometheus collects data in the form of time series. The time series are built through a pull model: the Prometheus server queries a list of data sources (sometimes called exporters) at a specific polling frequency. + * Prometheus provides its own query language PromQL (Prometheus Query Language) that lets users select and aggregate data. PromQL is specifically adjusted to work in convention with a Time-Series Database and therefore provides time-related query functionalities. 
+ * Promethus vs (Datadog, elk, nagios, dynatrace) + * Grafana - Grafana is the open source analytics and monitoring solution for every database + * Grafana was designed to work as a UI for analyzing metrics. Can be alternative to Kibana + * Dashboard, Datasource, Grpah, Panel, Plugin, Query, Timeseries and Visualization + * Grafana supports querying Prometheus. The Grafana data source for Prometheus is included since Grafana 2.5.0 (2015-10-28). + * Grafana requires a database to store its configuration data, such as users, data sources, and dashboards. MySql, Postgres and SqlLite are currently supported + * [Grafana Glossary] (https://grafana.com/docs/grafana/latest/guides/glossary/) + * [Grafana Data Source] (https://grafana.com/docs/grafana/latest/features/datasources/) + * Zipkin + * Zipkin is a distributed tracing system. It helps gather timing data needed to troubleshoot latency problems in service architectures. Features include both the collection and lookup of this data. + * Zipkin also has an instrumentation library to instrument other libraries to support tracing + +* [Prometheus_(software)](https://en.wikipedia.org/wiki/Prometheus_software) +* [ZIPKIN TUTORIAL: GET STARTED EASILY WITH DISTRIBUTED TRACING](https://www.scalyr.com/blog/zipkin-tutorial-distributed-tracing/) +* [Spring Cloud – Tracing Services with Zipkin](https://www.baeldung.com/tracing-services-with-zipkin) \ No newline at end of file diff --git a/src/main/md/massively_distributed/Resiliency_Patterns.md b/src/main/md/massively_distributed/Resiliency_Patterns.md new file mode 100644 index 00000000..f1b2be03 --- /dev/null +++ b/src/main/md/massively_distributed/Resiliency_Patterns.md @@ -0,0 +1,36 @@ +# Resiliency patterns like circuit breakers + +## Circuit Breakers + +* The goal of a circuit breaker, much like you would find in an electrical panel, is to stop the failure from cascading. When you drop a hair dryer in a bath tub, the breaker pops and it shuts the circuit off. The damage is limited to that one item and not the rest of the electrical system. + +* Latency is a Form of Failure, Latency means that something takes too long. Latency is the same as failure. If it takes you five minutes to save a work item, we’re down. + +* As calls come in and go through the circuit breaker, normally the circuit breaker is closed. Normally things are flowing through. The breaker is looking at the failure rates. When the failure rates exceed some percentage in a given window of time with a certain volume, the breaker opens. And when it opens it just starts failing calls. You may have a problem in the code and that problem might have been triggered by somebody’s behavior, but we’re going to start failing all those calls to save the system. Circuit breakers are all about saving the system to prevent failure from spreading through the rest of system. This is a blunt instrument but effective. + + +## Bulkheads (Partition) + +* Elements of an application are isolated into pools so that if one fails, the others will continue to function. It's named after the sectioned partitions (bulkheads) of a ship's hull. + +### Issues and considerations + +* Consider combining bulkheads with retry, circuit breaker, and throttling patterns to provide more sophisticated fault handling. +When partitioning consumers into bulkheads, consider using processes, thread pools, and semaphores. Projects like Netflix Hystrix and Polly offer a framework for creating consumer bulkheads. 
+ +* When partitioning services into bulkheads, consider deploying them into separate virtual machines, containers, or processes. Containers offer a good balance of resource isolation with fairly low overhead. + + +## Back-Pressure + +* When one component is struggling to keep-up, the system as a whole needs to respond in a sensible way. It is unacceptable for the component under stress to fail catastrophically or to drop messages in an uncontrolled fashion. Since it can’t cope and it can’t fail it should communicate the fact that it is under stress to upstream components and so get them to reduce the load. This back-pressure is an important feedback mechanism that allows systems to gracefully respond to load rather than collapse under it. The back-pressure may cascade all the way up to the user, at which point responsiveness may degrade, but this mechanism will ensure that the system is resilient under load, and will provide information that may allow the system itself to apply other resources to help distribute the load, see Elasticity. + +* One of the way of managing backpressure may be through the rate-limiting technique. The limitRate(n) operator splits the downstream demand into smaller batches not bigger than n. In this way, we may protect our delicate producer from an unjustified data request from a downstream consumer. + +* Producer/upstream - should have a way asses the situation and throttle limit, it can have api to accept feedback from downstream + +* RxJava supports backpressure + +* [Patterns for Resiliency in the Cloud - Circuit Breakers] (https://docs.microsoft.com/en-us/azure/devops/learn/devops-at-microsoft/patterns-resiliency-cloud) +* [Cloud Design Patterns - Bulk Head] (https://docs.microsoft.com/en-us/azure/architecture/patterns/bulkhead) +* [Resilence4j - ~~Hystrix~~](https://github.com/resilience4j/resilience4j) \ No newline at end of file diff --git a/src/main/md/massively_distributed/index.md b/src/main/md/massively_distributed/index.md new file mode 100644 index 00000000..bd7b3176 --- /dev/null +++ b/src/main/md/massively_distributed/index.md @@ -0,0 +1,17 @@ +# Large scale systems +* [Designing Schemaless, Uber Engineering’s Scalable Datastore Using MySQL](https://eng.uber.com/schemaless-part-one/) +* [SmartStack: Service Discovery in the Cloud](https://medium.com/airbnb-engineering/smartstack-service-discovery-in-the-cloud-4b8a080de619) + +# facebook engineering +* [Wormhole pub/sub system: Moving data through space and time](https://www.facebook.com/notes/facebook-engineering/wormhole-pubsub-system-moving-data-through-space-and-time/10151504075843920/) +* [McDipper: A key-value cache for Flash storage](https://www.facebook.com/notes/facebook-engineering/mcdipper-a-key-value-cache-for-flash-storage/10151347090423920/) +* [Scaling memcache at Facebook](https://code.facebook.com/posts/376799972445933/scaling-memcache-at-facebook/) + +# Other engineering blogs +* [Cherami: Uber Engineering’s Durable and Scalable Task Queue in Go](https://eng.uber.com/cherami/) +* [uReplicator: Uber Engineering’s Robust Kafka Replicator](https://eng.uber.com/ureplicator/) + +# Big Data, large scale architecture +* [Observability at Twitter: technical overview, part I](https://blog.twitter.com/engineering/en_us/a/2016/observability-at-twitter-technical-overview-part-i.html) +* [Observability at Twitter: technical overview, part II](https://blog.twitter.com/engineering/en_us/a/2016/observability-at-twitter-technical-overview-part-ii.html) +* [Flying faster with Twitter 
Heron](https://blog.twitter.com/engineering/en_us/a/2015/flying-faster-with-twitter-heron.html \ No newline at end of file diff --git a/src/main/md/mobile_tv_movie_streamin_app.md b/src/main/md/mobile_tv_movie_streamin_app.md new file mode 100644 index 00000000..0ba6cf80 --- /dev/null +++ b/src/main/md/mobile_tv_movie_streamin_app.md @@ -0,0 +1,7 @@ +kodi +hd movies +einthusan +ptv +apk land +livenet +swift streamz \ No newline at end of file diff --git a/src/main/md/music/songs.md b/src/main/md/music/songs.md new file mode 100644 index 00000000..4a588f0f --- /dev/null +++ b/src/main/md/music/songs.md @@ -0,0 +1,2 @@ +* Chhote Chhote Peg - Feb 2018 +* Mummy kasam - Nawabzaade \ No newline at end of file diff --git a/src/main/md/mysql.md b/src/main/md/mysql.md new file mode 100644 index 00000000..d0d97068 --- /dev/null +++ b/src/main/md/mysql.md @@ -0,0 +1,129 @@ +```ini +#D:\Apps\mysql-8.0.13-winx64\my.ini +[mysqld] +port=3306 +basedir=D:\\Apps\\mysql-8.0.13-winx64 +datadir=D:\\Apps\\mysql-8.0.13-winx64\\data +max_allowed_packet=16M + +[client] +port=3306 +``` + +# Initialize database in insecure-more +```bash +D:\Apps\mysql-8.0.13-winx64\bin\mysqld --defaults-file=D:\Apps\mysql-8.0.13-winx64\my.ini --initialize-insecure --console + +mysqld +mysql -u root -p +CREATE USER 'euler'@'localhost' IDENTIFIED BY 'euler'; +GRANT ALL PRIVILEGES ON * . * TO 'euler'@'localhost'; +FLUSH PRIVILEGES; +GRANT type_of_permission ON database_name.table_name TO ‘username’@'localhost’; +REVOKE type_of_permission ON database_name.table_name FROM ‘username’@‘localhost’; +SHOW GRANTS username; +``` + +CREATE DATABASE NIKIAS CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci; +GRANT ALL PRIVILEGES ON NIKIAS.* TO 'euler'@'localhost'; + +# Change root password (MySQL 8.0.14 on Windows) +``` +mysql -u root +use mysql; +ALTER USER 'root'@'localhost' IDENTIFIED WITH mysql_native_password BY 'root'; +FLUSH PRIVILEGES; +``` + +# Now the password was changed, start database normally: + +\bin\mysqld +# ACCESS database shell +\bin\mysql -u root -p + +```SQL +mysqld --initialize-insecure set password='root' +mysqld --console --port 3306 +mysql -u root -p +CREATE DATABASE thales CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci; +``` + + +# Most useful MySQL SQL +```sql + +SELECT table_name FROM information_schema.tables where table_schema='emp'; + +SELECT * +FROM information_schema.tables +WHERE table_type = 'BASE TABLE' + AND table_schema = 'thales' + +SELECT columns.* +FROM information_schema.columns columns, + information_schema.tables tables +WHERE tables.table_schema = 'thales' + AND columns.table_name = tables.table_name + +ALTER TABLE perforredingtonvmwaretrivandrumheldon2registrationreport ALTER j SET DEFAULT 1000; + +ALTER TABLE thales.perforredingtonvmwaretrivandrumheldon2registrationreport +ADD COLUMN TS DATETIME DEFAULT 20190101020000 AFTER source; +``` + +\bin\mysqld --init-file=\my.ini + +# MYSQL +```sql +mysql> use mysql; +mysql> show tables; +mysql> show databases; +mysql> describe tableName; +mysql> CREATE DATABASE thales; +mysql> CREATE USER 'project_user'@'localhost' IDENTIFIED BY 'PASSWORD'; +mysql> GRANT ALL ON `project_database`.* TO 'project_user'@'localhost'; +``` + +## Unexpected Error occurred attempting to open an SQL connection. +class com.mysql.cj.exceptions.InvalidConnectionAttributeException: The server time zone value 'Malay Peninsula Standard Time' is unrecognized or represents more than one time zone. 
You must configure either the server or the JDBC driver (via the serverTimezone configuration property) to use a more specific time zone value if you want to utilize time zone support.
+```
+jdbc:mysql://localhost:3306/thales?serverTimezone=UTC
+```
+
+## MySQL Reset Root password
+```bash
+net stop mysql
+mysqld --defaults-file="..." --skip-grant-tables
+mysql (Another parallel session)
+select user,host,authentication_string from mysql.user;
+-- reset authentication_string (password before 5.5.7) for the admin user ...
+update mysql.user
+set authentication_string=PASSWORD('new_pass'), password_expired='N' where user='root';
+flush privileges;
+exit; (Another parallel session - stops)
+mysqladmin shutdown
+net stop mysql
+```
+
+# JDBC configuration to use SSL
+clientCertificateKeyStoreUrl=file:path_to_truststore_file
+clientCertificateKeyStorePassword=mypassword
+-Djavax.net.ssl.trustStore=path_to_truststore_file
+-Djavax.net.ssl.trustStorePassword=mypassword
+System.setProperty("javax.net.ssl.trustStore","path_to_truststore_file");
+System.setProperty("javax.net.ssl.trustStorePassword","mypassword");
+
+jdbc:mysql://example.com:3306/MYDB?verifyServerCertificate=true&useSSL=true&requireSSL=true&clientCertificateKeyStoreUrl=file:cert/keystore.jks&clientCertificateKeyStorePassword=123456&trustCertificateKeyStoreUrl=file:cert/truststore.jks&trustCertificateKeyStorePassword=123456
+
+verifyServerCertificate=true
+useSSL=true
+requireSSL=true
+clientCertificateKeyStoreUrl=file:cert/keystore.jks
+clientCertificateKeyStorePassword=123456
+trustCertificateKeyStoreUrl=file:cert/truststore.jks
+trustCertificateKeyStorePassword=123456
+
+# [JDBC UseSSL](https://dev.mysql.com/doc/connector-j/5.1/en/connector-j-reference-using-ssl.html)
+
+# References
+* [MySQL manual](http://g2pc1.bu.edu/~qzpeng/manual/MySQL%20Commands.htm)
diff --git a/src/main/md/scala/excercise.md b/src/main/md/scala/excercise.md
new file mode 100644
index 00000000..4a26f040
--- /dev/null
+++ b/src/main/md/scala/excercise.md
@@ -0,0 +1,3 @@
+* Use super simple Free monad
+  * Define flatMap on it (see the sketch below).
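+* A minimal sketch of one possible answer (an assumed encoding, not the only one): flatMap simply records the continuation as data, and an interpreter folds it later.
+
+```Scala
+// "Super simple" Free monad: three constructors; flatMap builds structure instead of running anything.
+sealed trait Free[F[_], A] {
+  def flatMap[B](f: A => Free[F, B]): Free[F, B] = FlatMap(this, f)
+  def map[B](f: A => B): Free[F, B] = flatMap(a => Pure(f(a)))
+}
+case class Pure[F[_], A](a: A) extends Free[F, A]
+case class Suspend[F[_], A](fa: F[A]) extends Free[F, A]
+case class FlatMap[F[_], A, B](fa: Free[F, A], f: A => Free[F, B]) extends Free[F, B]
+
+// Tiny hypothetical instruction set, only to exercise flatMap
+case class Log[A](msg: String, next: A)
+val program: Free[Log, Int] = Suspend(Log("start", ())).flatMap(_ => Pure[Log, Int](42))
+```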
+ * \ No newline at end of file diff --git a/src/main/md/scala/learning_sclaz.md b/src/main/md/scala/learning_sclaz.md index dd6b35e0..15e9cf40 100644 --- a/src/main/md/scala/learning_sclaz.md +++ b/src/main/md/scala/learning_sclaz.md @@ -55,9 +55,10 @@ implicit val OptionEach: Each[Option] = new Each[Option] { * To define new monoid for custom type, define mzero[type] and semiGroup[type] * Monoids for Float and Double break the laws due to how they are represented, hence they are not monoids * Create monoid for Currency +```Scala implicit def MoneyZero(implicy ccy: Currency): Zero[Money] = zero(Money.zero(ccy)) implicit val ccySemiGroup: SemiGroup[Money] = semiGroup[Money] (_ add _) - +``` ```Scala 1 |+| 2 diff --git a/src/main/md/scala/recursion_scheme.md b/src/main/md/scala/recursion_scheme.md new file mode 100644 index 00000000..5e2ece68 --- /dev/null +++ b/src/main/md/scala/recursion_scheme.md @@ -0,0 +1,165 @@ +#[Original link](https://github.com/xgrommx/awesome-functional-programming/blob/master/rsfa.md) +## Recursion schemes, Recursion, F-{Co}algebras + +#### Articles, Resources + +* [Reflecting On Incremental Folds](http://comonad.com/reader/2009/incremental-folds) +* [Elgot (Co)Algebras](http://comonad.com/reader/2008/elgot-coalgebras) +* [Generatingfunctorology](http://comonad.com/reader/2008/generatingfunctorology) +* [Dynamorphisms as Chronomorphisms](http://comonad.com/reader/2008/dynamorphisms-as-chronomorphisms) +* [Unnatural Transformations](http://comonad.com/reader/2008/unnatural-transformations) +* [Generalized Hylomorphisms](http://comonad.com/reader/2008/generalized-hylomorphisms) +* [Rotten Bananas](http://comonad.com/reader/2008/rotten-bananas) +* [Higher-Order Abstract Syntax à la Carte](http://comonad.com/reader/2008/higher-order-abstract-syntax-a-la-carte) +* [Catamorphisms](https://www.schoolofhaskell.com/user/edwardk/recursion-schemes/catamorphisms) +* [Hylomorphism(その2)](http://d.hatena.ne.jp/m-a-o/20061117/p1) +* [Recursion schemeとHaskell](https://qiita.com/fumieval/items/01c7d7f93e261f843ef5) +* [Compositional data types](http://www.itu.dk/~paba/modularity/pubs/talks/bahr11wgp.html) +* [passy/awesome-recursion-schemes](https://github.com/passy/awesome-recursion-schemes) +* [Recursion Schemes](https://notepad.mmakowski.com/Recursion%20Schemes?revision=1a0cbb1a636e157a26f6a7175e91328cf26a2573) +* [Hylomorphisms in Haskell](https://ulissesaraujo.wordpress.com/2009/04/09/hylomorphisms-in-haskell/) +* [More Hylomorphisms in Haskell](https://ulissesaraujo.wordpress.com/2009/04/09/more-hylomorphisms-in-haskell/) +* [(Cata|Ana|Hylo|Para)morphisms](http://blog.nonowarn.jp/post/161397967/cata-ana-hylo-paramorphisms) +* [Dynamorphism 〜 Haskellでも動的計画法がしたい! 
〜](http://titech-ssr.blog.jp/archives/1047835805.html) +* [You Can’t Make an Algebra without Breaking a Few Eggs](https://bartoszmilewski.com/2014/01/28/you-cant-make-an-algebra-without-breaking-a-few-eggs/) +* [Understanding Algebras](https://www.schoolofhaskell.com/user/bartosz/understanding-algebras) +* [Constructions on Typeclasses, Part 1: F-Algebras](https://lukepalmer.wordpress.com/2013/03/12/constructions-on-typeclasses-part-1-f-algebras/) +* [Type-level Fix and generic folds](http://mainisusuallyafunction.blogspot.com/2010/12/type-level-fix-and-generic-folds.html) +* [Don't fear the cat-amorphism (nor the hylomorphism)](http://fho.f12n.de/posts/2014-05-07-dont-fear-the-cat.html) +* [Grokking recursion-scheme: Part 1](https://jozefg.bitbucket.io/posts/2014-05-19-like-recursion-but-cooler.html) +* [Grokking recursion-schemes: Part 2](https://jozefg.bitbucket.io/posts/2014-06-14-like-recursion-but-cooler-2.html) +* [Fixpoints and Iso-recursive Types](https://jozefg.bitbucket.io/posts/2013-11-09-iso-recursive-types.html) +* [Functors and Recursion](https://jozefg.bitbucket.io/posts/2014-11-19-recursion.html) +* [Origami](https://www.cs.ox.ac.uk/jeremy.gibbons/publications/origami.pdf) +* [Catamorphisms in 15 Minutes!](http://chrislambda.github.io/blog/2014/01/30/catamorphisms-in-15-minutes/) +* [A non-recursive sorting algorithm](http://haskellexists.blogspot.com/2014/09/a-non-recursive-sorting-algorithm.html) +* [PHOAS For Free](https://www.schoolofhaskell.com/user/edwardk/phoas) +* [Moore for Less](https://www.schoolofhaskell.com/user/edwardk/moore/for-less) +* [Algebras and Coalgebras](https://www.schoolofhaskell.com/user/tel/algebras-coalgebras) +* [Feval: F-Algebras for expression evaluation](http://www.burz.io/2014/06/15/feval.html) +* [Recursion Schemes: A Field Guide (Redux)](http://comonad.com/reader/2009/recursion-schemes/) +* [Rotating Squares](https://jtobin.io/rotating-squares) +* [Promorphisms, Pre and Post](https://jtobin.io/promorphisms-pre-post) +* [Recursive Stochastic Processes](https://jtobin.io/recursive-stochastic-processes) +* [Time Traveling Recursion Schemes](https://jtobin.io/time-traveling-recursion) +* [Monadic Recursion Schemes](https://jtobin.io/monadic-recursion-schemes) +* [Sorting Slower with Style](https://jtobin.io/sorting-slower-with-style) +* [Yo Dawg We Heard You Like Derivatives](https://jtobin.io/ad-via-recursion-schemes) +* [A Tour of Some Useful Recursive Types](https://jtobin.io/tour-of-some-recursive-types) +* [Sorting with Style](https://jtobin.io/sorting-with-style) +* [Practical Recursion Schemes](https://jtobin.io/practical-recursion-schemes) +* [Guide to morphism](http://b-studios.de/assets/guide-to-morphisms.pdf) +* [Bananas, Lenses, Envelopes, and Barbed Wire](http://etymon.blogspot.com/2004/08/bananas-lenses-envelopes-and-barbed.html) +* [Recursion Schemes (wiki)](https://en.wikipedia.org/wiki/Category:Recursion_schemes) +* [Understanding F-Algebras](https://bartoszmilewski.com/2013/06/10/understanding-f-algebras/) +* [Zygohistomorphic prepromorphisms](https://wiki.haskell.org/Zygohistomorphic_prepromorphisms) +* [An Introduction to Recursion Schemes](http://blog.sumtypeofway.com/an-introduction-to-recursion-schemes/) +* [Recursion Schemes, Part II: A Mob of Morphisms](http://blog.sumtypeofway.com/recursion-schemes-part-2/) +* [Recursion Schemes, Part III: Folds in Context](http://blog.sumtypeofway.com/recursion-schemes-part-iii-folds-in-context/) +* [Recursion Schemes, Part IV: Time is of the 
Essence](http://blog.sumtypeofway.com/recursion-schemes-part-iv-time-is-of-the-essence/) +* [Hylomorphisms and treesort](http://www.kovach.me/posts/2014-04-30-hylomorphisms-and-treesort.html) +* [Anamorphisms in JavaScript](http://raganwald.com/2016/11/30/anamorphisms-in-javascript.html) +* [Bananas, Lenses, Envelopes and Barbed Wire A Translation Guide](http://blog.ezyang.com/2010/05/bananas-lenses-envelopes-and-barbed-wire-a-translation-guide/) +* [Пределы выразительности свёрток](http://fprog.ru/2010/issue4/vitaly-bragilevsky-limits-of-folds-expressiveness/) +* [Why Recursive Data Structures?](http://raganwald.com/2016/12/27/recursive-data-structures.html) +* [F-algebras](https://bartoszmilewski.com/2017/02/28/f-algebras/) +* [CheatSheet](https://github.com/sellout/recursion-scheme-talk/blob/master/cheat%20sheet.pdf) +* [F-algebra(wiki)](https://wiki2.org/en/F-algebra) +* [Anamorphism Example](https://idontgetoutmuch.wordpress.com/2011/11/12/anamorphism-example/) +* [Least fixed point](https://wiki2.org/en/Least_fixed_point) +* [data Mu f = In (f (Mu f))](http://blog.plover.com/prog/springschool95-2.html) +* [Fixed point (mathematics)](https://wiki2.org/en/Fixed_point_(mathematics)) +* [Fixed point](https://ncatlab.org/nlab/show/fixed+point) +* [Fixed-point combinator](https://wiki2.org/en/Fixed-point_combinator) +* [Initial algebra](https://wiki2.org/en/Initial_algebra) +* [Категориальные типы](http://anton-k.github.io/ru-haskell-book/book/16.html) +* [Структурная рекурсия](http://anton-k.github.io/ru-haskell-book/book/12.html) +* [A Classy Approach to Recursion](http://fieldstrength.org/posts/2016-10-11-Classy-Recursion.html) +* [Fixing GADTs](http://www.timphilipwilliams.com/posts/2013-01-16-fixing-gadts.html) +* [Reasoning about Stream Processing with Effects](http://bentnib.org/posts/2012-01-06-streams.html) +* [Mutual Recursion in Final Encoding](https://aherrmann.github.io/programming/2016/05/28/mutual-recursion-in-final-encoding/) +* [Recursion Schemes](https://notepad.mmakowski.com/Recursion%20Schemes?revision=1a0cbb1a636e157a26f6a7175e91328cf26a2573) +* [What's in a Fold: The Basic Catamorphism in recursion-schemes](https://duplode.github.io/posts/whats-in-a-fold.html) +* [Introduction to Recursion Schemes with Matryoshka](http://akmetiuk.com/posts/2017-03-10-matryoshka-intro.html) +* [Encoding for least fixpoint](http://haskell-cafe.haskell.narkive.com/47SvrSB3/encoding-for-least-fixpoint) +* [A non-recursive sorting algorithm](http://haskellexists.blogspot.com.tr/2014/09/a-non-recursive-sorting-algorithm.html) +* [Type-level Fix and generic folds](http://mainisusuallyafunction.blogspot.com.tr/2010/12/type-level-fix-and-generic-folds.html) +* [Recursion Schemes and Functors](https://www.interact-sw.co.uk/iangblog/2014/03/20/recursion-schemes-functors) +* [Fun With PolyKinds: Polykinded Folds](http://blog.functorial.com/posts/2012-02-02-Polykinded-Folds.html) +* [Про F-Алгебры](https://medium.com/@Nik_Yurchenko/%D0%BF%D1%80%D0%BE-f-%D0%B0%D0%BB%D0%B3%D0%B5%D0%B1%D1%80%D1%8B-b00706fb6c7f) +* [слово о трансдюсерах](http://thedeemon.livejournal.com/87320.html) +* [Mixing Supercompilers and Recursion Using Elgot Algebras](http://blog.vmchale.com/article/elgot) +* [Computing Catalan Numbers Using Dynamorphisms](http://blog.vmchale.com/article/dynamorphisms-idris) +* [Review: Bananas, Lenses, Envelopes and Barbed Wire](http://reasonablypolymorphic.com/blog/recursion-schemes) +* [Metamorphisms](https://patternsinfp.wordpress.com/2017/10/04/metamorphisms/) +* 
[recursion](https://github.com/jozefg/blog/blob/master/posts/2014-11-19-recursion.md) +* [Programming with bananas and barbed wire. Part 1](https://monad.cat/posts/2016-05-10-barbed-wire.html) +* [Category Theory Helpline](https://oleksandrmanzyuk.wordpress.com/2012/12/01/category-theory-helpline/) + +#### Talks, Presentations + +* [willtim/recursion-schemes](https://github.com/willtim/recursion-schemes) +* [sellout/recursion-scheme-talk](https://github.com/sellout/recursion-scheme-talk) +* [Going bananas with recursion schemes for fixed point data types](https://www.slideshare.net/paulszulc/going-bananas-with-recursion-schemes-for-fixed-point-data-types) +* [Deriving Modular Recursion Schemes from Tree Automata](http://www.itu.dk/people/paba/modularity/pubs/talks/bahr12uucs.html) +* [F-Algebras or: How I Learned to Stop Worrying and Love the Type System](https://github.com/burz/presentations/blob/master/f-algebras/slides.md) +* [Recursion Schemes - Why, How and More](https://github.com/sergv/kievfprog-2017-november/blob/master/Talk.pdf) +* [Peeling The Banana: Recursion schemes from first principles](http://slides.com/zainabali_/peeling_the_banana) +* [A talk given at FP-Syd about F-algebras with respect to Haskell](https://github.com/christian-marie/f-algebra-talk/blob/master/slides.pdf) + +#### Videos + +* [Recursion Schemes](https://www.youtube.com/watch?v=Zw9KeP3OzpU) +* [Programming with algebras - Bartosz Milewski](https://www.youtube.com/watch?v=-98fR9VmLbQ) +* [Peeling the Banana: Recursion Schemes from First Principles - Zainab Ali](https://www.youtube.com/watch?v=XZ9nPZbaYfE) +* [scala.bythebay.io: Greg Pfeil, This programmer modeled his code after wooden nesting dolls](https://www.youtube.com/watch?v=lQdpXqD7Uic) +* [Recursion: Where Functional Programming Hits Bottom - Greg Pfeil](https://www.youtube.com/watch?v=24UoRaoKLjM) +* [Going bananas with recursion schemes for fixed point data types - Paweł Szulc (Lambda Days 2017)](https://www.youtube.com/watch?v=IlvJnkWH6CA) +* [Jean Remi Desjardins: A Gentle Introduction to Recursion Schemes - λC 2016](https://www.youtube.com/watch?v=i5A2Amfcir8) +* [Unifying Structured Recursion Schemes](https://www.youtube.com/watch?v=9EGYSb9vov8) +* [Introduction to Recursion Schemes – Ratan Sebastian](https://www.youtube.com/watch?v=6m0J_XllHqQ) +* [Dániel Berényi: Selected use cases of structured recursion schemes](https://www.youtube.com/watch?v=0Od_smjZ_jU) +* [Pawel Szulc - Going bananas with recursion schemes for fixed point data types](https://www.youtube.com/watch?v=_Gfnis1cVNM) +* [ScalaMatsuri day2 A-3 Recursion schemes in Scala - Paweł Szulc](https://www.youtube.com/watch?v=UmwV5JGTJz0) +* [Efficient Nanopass Compilers using Cats and Matryoshka - Greg Pfeil](https://www.youtube.com/watch?v=TQIHRBXM75E) + + + +#### Examples, Repos + +* [jkoppel/compstrat](https://github.com/jkoppel/compstrat) +* [matt-noonan/fixie](https://github.com/matt-noonan/fixie) +* [kosmikus/multirec](https://github.com/kosmikus/multirec) +* [fixplate](https://hackage.haskell.org/package/fixplate) +* [matt-noonan/functor-friends](https://github.com/matt-noonan/functor-friends) +* [patrickt/recschemes - Example code for my blog series on recursion schemes](https://github.com/patrickt/recschemes) +* [isovector/lets-recurse](https://github.com/isovector/lets-recurse) +* [JD95/recursion-scheme-examples](https://github.com/JD95/recursion-scheme-examples) +* [compdata](https://hackage.haskell.org/package/compdata) +* 
[vmchale/recursion_schemes](https://github.com/vmchale/recursion_schemes) +* [sellout/Iaia](https://github.com/sellout/Iaia) +* [sellout/yaya](https://github.com/sellout/yaya) +* [mmisamore/lfp-recursion-schemes](https://github.com/mmisamore/lfp-recursion-schemes) +* [oisdk/recursion-schemes-extras](https://github.com/oisdk/recursion-schemes-extras) +* [romac/lfc-haskell](https://github.com/romac/lfc-haskell) +* [DrBoolean/excursion](https://github.com/DrBoolean/excursion) +* [kosmikus/bc7d29cdbdcdae699eab](https://gist.github.com/kosmikus/bc7d29cdbdcdae699eab) +* [Feval - Evaluation using F-algebras](https://github.com/burz/Feval) +* [The pointless-haskell package](https://hackage.haskell.org/package/pointless-haskell) +* [The data-fix package](https://hackage.haskell.org/package/data-fix) +* [The recursion-schemes package](https://hackage.haskell.org/package/recursion-schemes) +* [RamdaJS reduceBy() in Haskell using recursion-schemes](http://stackoverflow.com/questions/42148749/ramdajs-reduceby-in-haskell-using-recursion-schemes) +* [purescript-matryoshka - Generalized folds, unfolds, and traversals for fixed point data structures](https://github.com/slamdata/purescript-matryoshka) +* [gpif-datakinds - Generic Programming with Indexed Functors using DataKinds -- (co)recursion schemes for "free"!](https://github.com/pbl64k/gpif-datakinds) +* [fixalgs - Fixed points and F-(co)algebras](https://github.com/bhamrick/fixalgs) +* [algebraic-classes - Conversions between algebraic classes and F-algebras](https://github.com/sjoerdvisscher/algebraic-classes) +* [Algebra - Study and implementations of F-Algebras and Bird-Meertens formalism](https://github.com/Crazycolorz5/Algebra) +* [Examples of histomorphisms in Haskell](http://stackoverflow.com/questions/24884475/examples-of-histomorphisms-in-haskell/24892711#24892711) +* [histoexample - quick example of how to use a histomorphism to solve a dynamic programming problem](https://github.com/benjumanji/histoexample) +* [Coalgebra - Some simple things to help me with my homework and to play around with Haskell](https://github.com/Jaxan/Coalgebra) +* [Generalized recursion schemes and traversals for Scala](https://github.com/slamdata/matryoshka) +* [A couple of quick questions about recursion-schemes](https://www.reddit.com/r/haskell/comments/3q0w7a/a_couple_of_quick_questions_about_recursionschemes/) +* [recur v1.0 :provide recursion scheme combinators for Scala](https://github.com/MiloXia/recur) +* [Recursion.hs](https://www.eyrie.org/~zednenem/2004/hsce/Control/Recursion.hs) +* [chronomorphisms](https://gist.github.com/danidiaz/e5debcaf531838eb6e2afd3ef3b34d60) +* [katalyst - Kotlin recursion schemes with Kategory](https://github.com/aedans/katalyst) \ No newline at end of file diff --git a/src/main/md/scala/sbt.md b/src/main/md/scala/sbt.md new file mode 100644 index 00000000..16622fb5 --- /dev/null +++ b/src/main/md/scala/sbt.md @@ -0,0 +1,83 @@ +* Setting new project + ```bash + $ sbt new sbt/scala-seed.g8 + $ cd hello + $ sbt + > run + > exit + ``` +* Sample build.sbt + * ```build.sbt + lazy val root = (project in file(".")) + .settings( + name := "hello", + organization := "com.example", + scalaVersion := "2.12.2", + version := "0.1.0-SNAPSHOT" + ) + ``` + +* SBT Theory + * build definition is defined in build.sbt, it consists of a set of projects + * subproject is the term used to define project in sbt documentation + * subproject defined by sequence of key-value pairs called setting (task) expressions + * built-in Keys can be found at 
(https://www.scala-sbt.org/0.13/sxr/sbt/Keys.scala.html) + * SettingKey[T] - computed once, + * TaskKey[T] - recomputed each time potentially with side effects + * Tasks are operations such as compile or package. + * InputKey[T] - a key for a task that has command line arguments + * + + +* projectDirectory\project :=> can contain .scala, .sbt file, They define helper objects and one-off plugins +* projectDirectory\target :=> compiled classes, packaged jars, managed files, caches, and documentation +* .gitignore should contain "target/" (as-is). So target directory from project and root-directory would be ignored + +* Impact of proxy settting + * Sometimes better to disable/set proxy using following + * set HTTP_PROXY=;set HTTPS_PROXY= + * set JAVA_OPTS=-Dhttp.proxySet=true -Dhttp.proxyHost=proxy.com -Dhttp.proxyPort=8080 + +* sbt commands + * clean, compile, test, testQuick, console + * run * + * reload + * libraryDependencies + * [Some of the commands](https://www.scala-sbt.org/0.13.15/sxr/sbt/Main.scala.html#sbt.BuiltinCommands.ConsoleCommands) + +* sbt history commands + * !, !:, !:n + * !!, !n, !-n, !string, !?, !?, !string + +* SBT launcher + * java -jar build/sbt-launch.jar -Dsbt.override.build.repos=true -Dsbt.repository.config=project/sbt.repositories + * IVY Home can be set using :=> -Dsbt.ivy.home=C:/Users/mohan/.ivy2 + * EclipseKeys.withSource := true + * EclipseKeys.withJavadoc := true + +* Sbt plugins + * addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "5.2.4") + * addSbtPlugin("io.get-coursier" % "sbt-coursier" % "1.0.1") + +* Sbt plugins {within scala console} + * reload plugins + * libraryDependencies + * reload return {/within scala console} + +* build.sbt settings + * logLevel := Level.Debug [Level](https://www.scala-sbt.org/0.13.15/sxr/sbt/Level.scala.html) + * scalaVersion := "2.11.8" + +* project/build.properties - Some settings are better at properties file, Predominantly used only for sbt.version since 0.9 sbt. + * sbt.version=0.13.17 + +* .sbt file + * .sbt file can contain vals and defs in addition to settings, + * vals and defs are evaluated before settings regardless of where they are defined in the file + * lazy vals are used instead of vals to avoid initialization order problems. + * lazy val hello = taskKey[Unit]("An example task") //sample for custom taskKey + * .settings( hello := { println("Hello!") } ) //invoke the task + * + +* Sbt basic usages + * sbt "~runMain ground.learning.App.FetchJavascriptStyleGuide"* diff --git a/src/main/md/scala/sbt_in_action.md b/src/main/md/scala/sbt_in_action.md new file mode 100644 index 00000000..37dcac32 --- /dev/null +++ b/src/main/md/scala/sbt_in_action.md @@ -0,0 +1,109 @@ +# Why SBT? +* A reactive development environment. (You change your source code, and sbt reruns your tests.) +* Allowing developers to use the Scala REPL on their projects. + +# Archaeology +* Ant - target and no workflow +* Maven - Workflow -> Phases(*) -> Targets (*) + * Opinionated workflow + * Maven lifecycle is a series of sequential phases. + * Each phase can contain zero or more tasks (tasks == goals) + * A goal is actually a goal within a plugin. + * Validate, Compile, Test, Package, Integration Test, Verify, Install and Deploy + * Maven watcher plugins - https://github.com/rzymek/watcher-maven-plugin + * a goal is executed in a phase, and the phases are executed sequentially. 
+* Gradle + * build.gradle is Groovy based, follows maven convention + * Gradle build files are more concise + * Lacks - Interactive console features (as of 2014) + * Continious build is available - https://docs.gradle.org/current/userguide/command_line_interface.html#sec:continuous_build + * Gradles task dependency mechanism isnt explicit + * Maven, Gradle, and Ant arent really interactive environments. + +# Why SBT? +* With sbt, each task has an output and explicit dependencies +* build typessafety, flexibility and the ability to insert custom tasks easily +* tasks can be executed in parallel by default. +* Sbt uses the dependency tree to ensure that tasks are executed in the right order. +* SBT has no phases, goals or executions (it has only tasks) +* SBT := Tasks + Settings ( + Configurations) +* crossScalaVersions - supports crossVersion of scala output library + +# What is SBT? +* If you run the test task, sbt will run the compile task beforehand. +* Once task is generic enough or useful enough, you can turn the task into a plugin +* Every task that was executed has a value in SBT (lack of value shows that it wasn't executed) +* sbt uses Ivy for its dependency resolution, whereas Maven uses Aether. +* The ability to explore your project using the Scala REPL + +# Settings and initialization +* ":=" Assigns an initialization expression to a key. Overrides any previous value. +* In sbt all settings can be implemented in terms of the := operator. For example, foo += bar.value is just foo := foo.value + bar.value. +* You can access the value of another setting using the value method, version.value, organization.value +* libraryDependencies :: Seq[ModuleID] + * Has += and ++= (one for single value append and another to append Seq[ModuleID] +* Initializations are code + * Initializations can read environment variables, properties, files, or anything else available in the JVM. +* Settings can depend on other settings like spread-sheet, be careful about circular dependency +* := operator used to assign value, whereas "=" define the placeholder for the variable of task or settings + +# SBT Configuration +* Configurations are namespaces for keys. (like packages) [sources in Compile vs sources in Test] +* Configurations allow the same key, or column, to be reused to serve different purposes. 
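A small illustrative `build.sbt` sketch of the point above (sbt 0.13-style syntax, as used in these notes; the key and option values are chosen only for illustration): the same key holds different values in different configuration namespaces.
```Scala
// build.sbt sketch: one key, scoped to two configurations, with two different values.
scalacOptions in Compile := Seq("-deprecation")
scalacOptions in Test    := Seq("-deprecation", "-Xcheckinit")

// In the sbt shell the two scopes are inspected separately:
//   > show compile:scalacOptions
//   > show test:scalacOptions
```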
+* Compile, Test, Runtime, IntegrationTest +* ThisBuild configuration would be used for catch-all scenario + + +# SBT Plugins +* plugins brings additional settings and tasks +* /project - is a project about build itself, build-project +* /project/plugins.sbt - name doesn't matter, you can call it myaddonplugins.sbt, or anything.sbt + * Example to add plugins - addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "0.5.0") + * plugins can have its own resolvers - resolvers += "sonatype-releases" at "https://oss.sonatype.org/content/repositories/releases/" + * resolvers in build.sbt and plugins.sbt are dedicated, we may not to declare sometime in both the places + * %% in plugins would resolve to sbt version 0.13, 0.12 or 1.11 +* Scala version used when resolving plugins for sbt may be different from build.sbt Scala version +* Important plugins + * SbtResolver + * addSbtPlugin("io.spray" % "sbt-revolver" % "0.7.2") - restarts main application if there are code change + * sbt-resolver is not spray specific, and reStart and reStop command can be used with ~ + * ~reStart - would automatically restarts the application + * Uber jar assembler - addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.11.2") + * import AssemblyKeys._; assemblySettings + * mainClass in assembly := Some("Global") + * declare merge stategy (in build.sbt) for handling duplicate (concatenate, use new, use old) + * mainClass in assembly := Some("Global") -- should be mentioned for executable jar + * You might even have - libraryDependencies ++= Seq( "org.apache.velocity" % "velocity" % "1.7") in order for Build.scala to use plugins + * $HOME/.sbt/0.13/credentials.sbt: + * credentials += Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", "myusername","mypassword") + +# build.sbt vs Build.scala +* build.sbt - declarative approach like Maven, Gradle +* Build.scala - pure scala centric approach +* + +# SBT Session +``` +sbt +console +projects +reload #sbt has been changed in another console +tasks +settings +test +testOnly +``` + +# Checklists +* A rule of thumb, if there is an Exception-InInitalization when starting an sbt, grep of your build.sbt files for a val without the lazy. lazy may solve the problem as some values are used before it is being executed +* In sbt, the fundamental unit of concurrency is the task. To increase parallelism of your builds, you need to have more tasks that can run simultaneously. + +* Define a task in sbt that takes the output of a shell command and retrieves the first line: +```SBT +val gitHeadCommitSha = taskKey[String]("Determines the current git commit SHA") +gitHeadCommitSha := Process("git rev-parse HEAD").lines.head +``` + + +# References +* [SBT in Action source code](https://github.com/jsuereth/sbt-in-action-examples) diff --git a/src/main/md/scala/sbt_in_action/project_kittens.sbt b/src/main/md/scala/sbt_in_action/project_kittens.sbt new file mode 100644 index 00000000..360ee6c0 --- /dev/null +++ b/src/main/md/scala/sbt_in_action/project_kittens.sbt @@ -0,0 +1,46 @@ +name := "preowned-kittens" + +// Custom keys for this build. 
+ +val gitHeadCommitSha = taskKey[String]("Determines the current git commit SHA") + +val makeVersionProperties = taskKey[Seq[File]]("Creates a version.properties file we can find at runtime.") + +// Common settings/definitions for the build + +def PreownedKittenProject(name: String): Project = ( + Project(name, file(name)) + settings( + version := "1.0", + organization := "com.preownedkittens", + libraryDependencies += "org.specs2" % "specs2_2.10" % "1.14" % "test" + ) +) + +gitHeadCommitSha in ThisBuild := Process("git rev-parse HEAD").lines.head + +// Projects in this build + +lazy val common = ( + PreownedKittenProject("common"). + settings( + makeVersionProperties := { + val propFile = (resourceManaged in Compile).value / "version.properties" + val content = "version=%s" format (gitHeadCommitSha.value) + IO.write(propFile, content) + Seq(propFile) + } + ) +) + +val analytics = ( + PreownedKittenProject("analytics"). + dependsOn(common). + settings() +) + +val website = ( + PreownedKittenProject("website"). + dependsOn(common). + settings() +) \ No newline at end of file diff --git a/src/main/md/scala/scala_check.md b/src/main/md/scala/scala_check.md index 62a19331..49efde41 100644 --- a/src/main/md/scala/scala_check.md +++ b/src/main/md/scala/scala_check.md @@ -1,244 +1,244 @@ -# ScalaCheck Property-based software testing -* Specifications and tests and ScalaCheck vs Junit -* A specification is a definition of a program's abstract behavior, provides complete picture and informal -* Tests are concrete examples of how a program should behave in particular situations and it is formal. -* In TDD and BDD, tests as specification is to make your specification more test-centered. It is also known as executable specification. -* Property-based testing goes in the opposite direction by making your tests more specification-like -* Every system has structure and behaviour. In BDD we test behaviour of the system, Test cases are coarse grained level or use-case level. - -* Sample scala check -```Scala - /** Dependency -- "org.scalacheck" %% "scalacheck" % "1.13.5" **/ - import org.scalacheck.Properties - import org.scalacheck.Prop.forAll - - object MathProps extends Properties("Math") { - - property("max") = forAll { (x: Int, y: Int) => - val z = java.lang.Math.max(x, y) - (z == x || z == y) && (z >= x && z >= y) - } - - } -``` -* Properties are sometimes called parameterized tests -* Benefits of properties based testing - * Increased - Test coverage due to randomized input - * Specification completeness - due to abstract test - * Maintenance - less code to maintain and refactor - * Test case simplification - Finding smallest set for which test case fails -* When property based test case fails. - * Handle it in the implementation code, and repurcusion may impact test case, hence handle it in testcase - * Handle the exception in the propery-based test case - * Ignore the particular case, by filtering out possibility in the test-case -* ScalaCheck == Properties ++ Generators -* Properties - * A single property in ScalaCheck is the smallest testable unit. It is represented by an instance of the org.scalacheck.Prop class. - * org.scalacheck.Properties is a class with lot of operators to compose multiple Property. - * org.scalacheck.Properties.property method is used to add named properties to the set. -* Generator - * Generator can generate for any type, Implicit used instead of reflection to find the type T. 
- ```Scala - class Gen[+T] { - def apply(prms: Gen.Params): Option[T] - } - ``` - * Generator might sometime fail, That is why return type is Option[T] - * org.scalacheck.Gen has function to accept function that expects Generated value, normally we don't need to deal with Generator including custom generator. - * Ensure instance of Arbitrary[T] is in path would be enough to supply T for any testcase - -* Sample custom generator. -```Scala - import org.scalacheck.Gen.{choose, oneOf} - - case class Person (firstName: String, lastName: String, age: Int) { def isTeenager = age >= 13 && age <= 19 } - - val genPerson = for { - firstName <- oneOf("Alan", "Ada", "Alonzo") - lastName <- oneOf("Lovelace", "Turing", "Church") - age <- choose(1,100) - } yield Person(firstName, lastName, age) - - import org.scalacheck.Arbitrary - implicit val arbPerson = Arbitrary(genPerson) - val propPerson = forAll { p: Person => p.isTeenager == (p.age >= 13 && p.age <= 19)} -``` - - -# Designing properties -* It can be hard to immediately come up with a ScalaCheck property that completely describes all behavioral aspects that you want to specify. -* Don't be afraid of leaving a property incomplete if it starts to get too involved. -* Strive to make your properties straightforward, and try to make them as different as possible from the tested code, even if it means you lose specification completeness. -* Relation properties - * A special form of incomplete properties are relation properties. - * Instead of specifying a unit of code against one input instance at a time, you can use two or more test cases in the same property - * Base your specification on the relation between the inputs. -* Reference implementations - * Example: Write property for a specialized integer map behaves exactly like the generic hash map. - * Can be used only when direct specifications are hard to define or when it is very simple to define a correct reference implementation that can be used to test a more complex one - * Can be used in an iterative development process, testing new and improved implementations against old and stable ones -* Methods that has pre-condition (Restricting TestCases) - * Example don't invoke with null, and class should be in certain state (precondition) - * This is equivalent to saying that if the precondition is false, then the method behaves correctly no matter what it does. - * Generally, it is never a bad idea to write out the preconditions in your properties, since it also works as documentation of the code under test. - * When ScalaCheck tests property, it skips over the cases where the precondition is not fulfilled, and regards them as discarded tests. - * ScalaCheck's test case simplification feature can cause troubles when methods with preconditions are tested. - * In such cases, you need to specify the complete precondition in your property even though you are using custom generators for the method input -* Round-Trip Properties - * Encoder-Decoder :: decode(encode(x)) == x - * Serializer-Deserializer :: deseiralize(serialize(x)) == x - * xs: List[Int] => xs.reverse.reverse == xs - * parse(prettyPrint(ast)) == ast -* Constructing Optimal output - * A variation on the round-trip theme is a case when it is easy to synthesize an optimal output for the method you're testing - * But not equally trivial to check whether a given output is optimal. - * Produce some output and calculate input, and test method with calculated input. 
- - # Properties in Details - * We can label property and test arguments using String or Symbol. We need to use Operator |:. (| is always facing label, : Always facing property) or :| - * Instead of label feature, we could also use ScalaTest PropertyChecks style, it would automatically give clear message when test fails. IDE friendly line number also presented. - * Smart equivalence checks using ?= or =?. When boolean expression fails, print both side values. - * The value closest to the question mark will be presented as the actual value and the value closest to the equal sign will be presented as the expected one. - * Collecting test statistics using classify (and nested classify). - * Prop.forAll, Prop.throws, Prop.exists, Prop.all and Prop.atLeastOne. Alternatively && and || could also be used instead of forAll and atLeastOne. - * Know about usage of org.scalacheck.Prop.{ undecided, proved, passed, exception, falsified } - * Sample - ```scala - import org.scalacheck.Prop.{AnyOperators, forAll, classify} - - forAll(choose(0,100) :| "pos", choose(-10,0) :| "neg")(_ * _ < 0) - forAll('prime |: oneOf(2,3,5,7)) { prime => prime % 2 != 0 } - - val propInterleave = - forAll { (xs: List[Int], ys: List[Int]) => - val res = interleave(xs,ys) - val is = (0 to math.min(xs.length, ys.length)-1).toList - all( - "length" |: - xs.length+ys.length =? res.length, - "zip xs" |: - xs =? is.map(i => res(2*i)) ++ res.drop(2*ys.length), - "zip ys" |: - ys =? is.map(i => res(2*i+1)) ++ res.drop(2*xs.length) - ) - } - - val p = forAll { n:Int => - classify(n % 2 == 0, "even", "odd") { - classify(n < 0, "neg", "pos") { - classify(math.abs(n) > 50, "large") { - n+n == 2*n - } - } - } - } - - val propSorted = forAll { xs: List[Int] => - val r = xs.sorted - - val isSorted = r.indices.tail.forall(i => r(i) >= r(i-1)) - val containsAll = xs.forall(r.contains) - val correctSize = xs.size == r.size - - isSorted :| "sorted" && - containsAll :| "all elements" && - correctSize :| "size" - } - - ``` -# Generators in Details -* Give up and discarded - * One way of creating a new generator is to attach a filter to an existing one, by using the Gen.suchThat method - * If you use this generator in a property, each filtered generator value results in a **discarded property** evaluation. - * If you add a filter that is too narrow, too many values will be discarded and ScalaCheck will **give up on checking the property**. In those cases using Gen.retryUntil with caution. -* Size and resize for custom generators - * Gen.sized and Gen.resize When ScalaCheck produces data with a generator, it tells the generator what data size it wants. - * Size lets you test a property with increasingly larger data sets. A generator may interpret the data size parameter freely, or even ignore it if it doesn't make sense to use it. - * When you implement your own generator, you can use the size variable by utilizing the Gen.sized or Gen.resize methods. - * The Gen.size method takes an anonymous function as its only parameter, and this function in turn takes an integer value as its parameter. 
-* Gen.const, Gen.fail -* Generators could be composed to create Higher-order generators or recursive Generators -* Higher-order generators - * Gen.sequence - * Gen.frequency -```Scala - import org.scalacheck.Gen.{choose, negNum, posNum} /** show few more generatos **/ - /** AlphaChar, String Generator **/ - val genString = for { - c1 <- Gen.numChar - c2 <- Gen.alphaUpperChar - c3 <- Gen.alphaLowerChar - c4 <- Gen.alphaChar - c5 <- Gen.alphaNumChar - } yield List(c1,c2,c3,c4,c5).mkString - - alpha <- Gen.alphaStr - num <- Gen.numStr - id <- Gen.identifier -``` - -* Some reusable template -```Scala - Gen.sequence(List(choose(1,10), const(20), const(30))) - Gen.frequency((1, oddNumberGen), (2, evenNumberGen), (4, 0) ) - val genNotZero = Gen.oneOf(choose(-10,-1), choose(1,10)) - val users: List[User] = db.getUsers - val genUsers = Gen.oneOf(users) - val genIntList = Gen.listOf(choose(0,10)) - val genEightBytes = Gen.listOfN(8, arbitrary[Byte]) - val genIntList = Gen.containerOf[List,Int](choose(0,10)) - val oddInt = arbitrary[Int] suchThat (_ % 2 != 0) (Potential discarded property) - val oddInt = arbitrary[Int] retryUntil (_ % 2 != 0) (Discarded property will be zero) - val numbers = Gen.someOf(List(1,2,3,4)) - val twoStrings = Gen.pick(2, List("red", "blue", "green", "pink")) - - - def genList[T](genElem: Gen[T]): Gen[List[T]] = { - sized { sz: Int => - for { - listSize <- Gen.choose(0, sz) - list <- Gen.listOfN(listSize, genElem) - } yield list - } - } -``` -# Shrink framework -* Shrink allows to find a case that fails for 100s parameters into a case `as simple as` possible. -* Shrink framework would try to narrow down failed cases. -* Shrink framework use Stream type to evaluates all elements lazily, which makes the process of simplifying test cases more performant. -* Sometime shrink framework would try to minimize and would end-up other problem. -* The shrink method takes a value and returns a stream of simpler variants of that value. - * You are free to implement the shrink method in a way that makes sense for your particular type. - * Shrink method must converge towards an empty stream. - * if you run shrink on elements in its output, the original value is not allowed to re-appear. - * org.scalacheck.Shrink.shrink(10).check => 0, 5, -5, 8, -8, 9, -9, empty -```Scala - trait Shrink[T] { - def shrink(x: T): scala.collection.immutable.Stream[T] - } -``` -# Runtime considerations -* Prefer using PropertyChecks style when using scalaTest -* ScalaCheck runtime parameters - * maxDiscardedRatio - determines how hard ScalaCheck will try before giving up on a property - * minSuccessfulTests - parameter, default value is 100. - Default is 5. - * if the minimum number of successful tests is 100, then 500 discarded attempts are allowed before property evaluation is aborted. - * maxDiscardedRatio - ScalaCheck will always try at least as many evaluations as the specified minimum number of successful tests, even if the maximum discarded ratio is exceeded at some point during the evaluation iteration - * Size - * minSize and maxSize - ScalaCheck can control the size of the generated test data by providing the generator with a size parameter, a hint about how large the generated value should be. - * The size makes most sense for generators that produce some kind of collection. - * workers - ScalaCheck can use several threads in parallel when checking a property. The thread count is controlled by the workers parameter. 
- * Test.Parameters.default.withMinSuccessfulTests(5000).withWorkers(2) - * Test.check(oneWorker, p).time - Can find time take to test a property - * org.scalacheck.Test.TestCallback. This object will receive callbacks during the test execution. - * The main difference compared with Checkers is that you need not use ScalaCheck labels with PropertyChecks. - * When using with SBT - ```Scala - testOptions in Test += - Tests.Argument( - TestFrameworks.ScalaCheck, - "-maxDiscardRatio", "10", - "-minSuccessfulTests", "1000" - ) - ``` - * scala -cp scalacheck.jar:. ListSpec --help +# ScalaCheck Property-based software testing +* Specifications and tests and ScalaCheck vs Junit +* A specification is a definition of a program's abstract behavior, provides complete picture and informal +* Tests are concrete examples of how a program should behave in particular situations and it is formal. +* In TDD and BDD, tests as specification is to make your specification more test-centered. It is also known as executable specification. +* Property-based testing goes in the opposite direction by making your tests more specification-like +* Every system has structure and behaviour. In BDD we test behaviour of the system, Test cases are coarse grained level or use-case level. + +* Sample scala check +```Scala + /** Dependency -- "org.scalacheck" %% "scalacheck" % "1.13.5" **/ + import org.scalacheck.Properties + import org.scalacheck.Prop.forAll + + object MathProps extends Properties("Math") { + + property("max") = forAll { (x: Int, y: Int) => + val z = java.lang.Math.max(x, y) + (z == x || z == y) && (z >= x && z >= y) + } + + } +``` +* Properties are sometimes called parameterized tests +* Benefits of properties based testing + * Increased - Test coverage due to randomized input + * Specification completeness - due to abstract test + * Maintenance - less code to maintain and refactor + * Test case simplification - Finding smallest set for which test case fails +* When property based test case fails. + * Handle it in the implementation code, and repurcusion may impact test case, hence handle it in testcase + * Handle the exception in the propery-based test case + * Ignore the particular case, by filtering out possibility in the test-case +* ScalaCheck == Properties ++ Generators +* Properties + * A single property in ScalaCheck is the smallest testable unit. It is represented by an instance of the org.scalacheck.Prop class. + * org.scalacheck.Properties is a class with lot of operators to compose multiple Property. + * org.scalacheck.Properties.property method is used to add named properties to the set. +* Generator + * Generator can generate for any type, Implicit used instead of reflection to find the type T. + ```Scala + class Gen[+T] { + def apply(prms: Gen.Params): Option[T] + } + ``` + * Generator might sometime fail, That is why return type is Option[T] + * org.scalacheck.Gen has function to accept function that expects Generated value, normally we don't need to deal with Generator including custom generator. + * Ensure instance of Arbitrary[T] is in path would be enough to supply T for any testcase + +* Sample custom generator. 
+```Scala + import org.scalacheck.Gen.{choose, oneOf} + + case class Person (firstName: String, lastName: String, age: Int) { def isTeenager = age >= 13 && age <= 19 } + + val genPerson = for { + firstName <- oneOf("Alan", "Ada", "Alonzo") + lastName <- oneOf("Lovelace", "Turing", "Church") + age <- choose(1,100) + } yield Person(firstName, lastName, age) + + import org.scalacheck.Arbitrary + implicit val arbPerson = Arbitrary(genPerson) + val propPerson = forAll { p: Person => p.isTeenager == (p.age >= 13 && p.age <= 19)} +``` + + +# Designing properties +* It can be hard to immediately come up with a ScalaCheck property that completely describes all behavioral aspects that you want to specify. +* Don't be afraid of leaving a property incomplete if it starts to get too involved. +* Strive to make your properties straightforward, and try to make them as different as possible from the tested code, even if it means you lose specification completeness. +* Relation properties + * A special form of incomplete properties are relation properties. + * Instead of specifying a unit of code against one input instance at a time, you can use two or more test cases in the same property + * Base your specification on the relation between the inputs. +* Reference implementations + * Example: Write property for a specialized integer map behaves exactly like the generic hash map. + * Can be used only when direct specifications are hard to define or when it is very simple to define a correct reference implementation that can be used to test a more complex one + * Can be used in an iterative development process, testing new and improved implementations against old and stable ones +* Methods that has pre-condition (Restricting TestCases) + * Example don't invoke with null, and class should be in certain state (precondition) + * This is equivalent to saying that if the precondition is false, then the method behaves correctly no matter what it does. + * Generally, it is never a bad idea to write out the preconditions in your properties, since it also works as documentation of the code under test. + * When ScalaCheck tests property, it skips over the cases where the precondition is not fulfilled, and regards them as discarded tests. + * ScalaCheck's test case simplification feature can cause troubles when methods with preconditions are tested. + * In such cases, you need to specify the complete precondition in your property even though you are using custom generators for the method input +* Round-Trip Properties + * Encoder-Decoder :: decode(encode(x)) == x + * Serializer-Deserializer :: deseiralize(serialize(x)) == x + * xs: List[Int] => xs.reverse.reverse == xs + * parse(prettyPrint(ast)) == ast +* Constructing Optimal output + * A variation on the round-trip theme is a case when it is easy to synthesize an optimal output for the method you're testing + * But not equally trivial to check whether a given output is optimal. + * Produce some output and calculate input, and test method with calculated input. + + # Properties in Details + * We can label property and test arguments using String or Symbol. We need to use Operator |:. (| is always facing label, : Always facing property) or :| + * Instead of label feature, we could also use ScalaTest PropertyChecks style, it would automatically give clear message when test fails. IDE friendly line number also presented. + * Smart equivalence checks using ?= or =?. When boolean expression fails, print both side values. 
+ * The value closest to the question mark will be presented as the actual value and the value closest to the equal sign will be presented as the expected one. + * Collecting test statistics using classify (and nested classify). + * Prop.forAll, Prop.throws, Prop.exists, Prop.all and Prop.atLeastOne. Alternatively && and || could also be used instead of forAll and atLeastOne. + * Know about usage of org.scalacheck.Prop.{ undecided, proved, passed, exception, falsified } + * Sample + ```scala + import org.scalacheck.Prop.{AnyOperators, forAll, classify} + + forAll(choose(0,100) :| "pos", choose(-10,0) :| "neg")(_ * _ < 0) + forAll('prime |: oneOf(2,3,5,7)) { prime => prime % 2 != 0 } + + val propInterleave = + forAll { (xs: List[Int], ys: List[Int]) => + val res = interleave(xs,ys) + val is = (0 to math.min(xs.length, ys.length)-1).toList + all( + "length" |: + xs.length+ys.length =? res.length, + "zip xs" |: + xs =? is.map(i => res(2*i)) ++ res.drop(2*ys.length), + "zip ys" |: + ys =? is.map(i => res(2*i+1)) ++ res.drop(2*xs.length) + ) + } + + val p = forAll { n:Int => + classify(n % 2 == 0, "even", "odd") { + classify(n < 0, "neg", "pos") { + classify(math.abs(n) > 50, "large") { + n+n == 2*n + } + } + } + } + + val propSorted = forAll { xs: List[Int] => + val r = xs.sorted + + val isSorted = r.indices.tail.forall(i => r(i) >= r(i-1)) + val containsAll = xs.forall(r.contains) + val correctSize = xs.size == r.size + + isSorted :| "sorted" && + containsAll :| "all elements" && + correctSize :| "size" + } + + ``` +# Generators in Details +* Give up and discarded + * One way of creating a new generator is to attach a filter to an existing one, by using the Gen.suchThat method + * If you use this generator in a property, each filtered generator value results in a **discarded property** evaluation. + * If you add a filter that is too narrow, too many values will be discarded and ScalaCheck will **give up on checking the property**. In those cases using Gen.retryUntil with caution. +* Size and resize for custom generators + * Gen.sized and Gen.resize When ScalaCheck produces data with a generator, it tells the generator what data size it wants. + * Size lets you test a property with increasingly larger data sets. A generator may interpret the data size parameter freely, or even ignore it if it doesn't make sense to use it. + * When you implement your own generator, you can use the size variable by utilizing the Gen.sized or Gen.resize methods. + * The Gen.size method takes an anonymous function as its only parameter, and this function in turn takes an integer value as its parameter. 
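A small sketch of `Gen.sized` and `Gen.resize` as described above; the concrete generators are made up for illustration.
```Scala
import org.scalacheck.Gen

// Gen.sized exposes the size parameter that ScalaCheck passes in; Gen.resize overrides it.
val sizedList: Gen[List[Int]] = Gen.sized { size => Gen.listOfN(size, Gen.choose(0, 100)) }
val smallList: Gen[List[Int]] = Gen.resize(5, sizedList) // size parameter fixed to 5, so 5 elements
```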
+* Gen.const, Gen.fail +* Generators could be composed to create Higher-order generators or recursive Generators +* Higher-order generators + * Gen.sequence + * Gen.frequency +```Scala + import org.scalacheck.Gen.{choose, negNum, posNum} /** show few more generatos **/ + /** AlphaChar, String Generator **/ + val genString = for { + c1 <- Gen.numChar + c2 <- Gen.alphaUpperChar + c3 <- Gen.alphaLowerChar + c4 <- Gen.alphaChar + c5 <- Gen.alphaNumChar + } yield List(c1,c2,c3,c4,c5).mkString + + alpha <- Gen.alphaStr + num <- Gen.numStr + id <- Gen.identifier +``` + +* Some reusable template +```Scala + Gen.sequence(List(choose(1,10), const(20), const(30))) + Gen.frequency((1, oddNumberGen), (2, evenNumberGen), (4, 0) ) + val genNotZero = Gen.oneOf(choose(-10,-1), choose(1,10)) + val users: List[User] = db.getUsers + val genUsers = Gen.oneOf(users) + val genIntList = Gen.listOf(choose(0,10)) + val genEightBytes = Gen.listOfN(8, arbitrary[Byte]) + val genIntList = Gen.containerOf[List,Int](choose(0,10)) + val oddInt = arbitrary[Int] suchThat (_ % 2 != 0) (Potential discarded property) + val oddInt = arbitrary[Int] retryUntil (_ % 2 != 0) (Discarded property will be zero) + val numbers = Gen.someOf(List(1,2,3,4)) + val twoStrings = Gen.pick(2, List("red", "blue", "green", "pink")) + + + def genList[T](genElem: Gen[T]): Gen[List[T]] = { + sized { sz: Int => + for { + listSize <- Gen.choose(0, sz) + list <- Gen.listOfN(listSize, genElem) + } yield list + } + } +``` +# Shrink framework +* Shrink allows to find a case that fails for 100s parameters into a case `as simple as` possible. +* Shrink framework would try to narrow down failed cases. +* Shrink framework use Stream type to evaluates all elements lazily, which makes the process of simplifying test cases more performant. +* Sometime shrink framework would try to minimize and would end-up other problem. +* The shrink method takes a value and returns a stream of simpler variants of that value. + * You are free to implement the shrink method in a way that makes sense for your particular type. + * Shrink method must converge towards an empty stream. + * if you run shrink on elements in its output, the original value is not allowed to re-appear. + * org.scalacheck.Shrink.shrink(10).check => 0, 5, -5, 8, -8, 9, -9, empty +```Scala + trait Shrink[T] { + def shrink(x: T): scala.collection.immutable.Stream[T] + } +``` +# Runtime considerations +* Prefer using PropertyChecks style when using scalaTest +* ScalaCheck runtime parameters + * maxDiscardedRatio - determines how hard ScalaCheck will try before giving up on a property + * minSuccessfulTests - parameter, default value is 100. - Default is 5. + * if the minimum number of successful tests is 100, then 500 discarded attempts are allowed before property evaluation is aborted. + * maxDiscardedRatio - ScalaCheck will always try at least as many evaluations as the specified minimum number of successful tests, even if the maximum discarded ratio is exceeded at some point during the evaluation iteration + * Size + * minSize and maxSize - ScalaCheck can control the size of the generated test data by providing the generator with a size parameter, a hint about how large the generated value should be. + * The size makes most sense for generators that produce some kind of collection. + * workers - ScalaCheck can use several threads in parallel when checking a property. The thread count is controlled by the workers parameter. 
+ * Test.Parameters.default.withMinSuccessfulTests(5000).withWorkers(2) + * Test.check(oneWorker, p).time - Can find time take to test a property + * org.scalacheck.Test.TestCallback. This object will receive callbacks during the test execution. + * The main difference compared with Checkers is that you need not use ScalaCheck labels with PropertyChecks. + * When using with SBT + ```Scala + testOptions in Test += + Tests.Argument( + TestFrameworks.ScalaCheck, + "-maxDiscardRatio", "10", + "-minSuccessfulTests", "1000" + ) + ``` + * scala -cp scalacheck.jar:. ListSpec --help diff --git a/src/main/md/scala/scala_type_system.md b/src/main/md/scala/scala_type_system.md index b0989291..e58cccf7 100644 --- a/src/main/md/scala/scala_type_system.md +++ b/src/main/md/scala/scala_type_system.md @@ -1,12 +1,12 @@ -# Scala type-system - -* Scala compiler let us treat types themselves as values to certain extent -* Type alias are supported in Scala - * ```type UserId = Int``` -* We can tag known primitive type and enforce that API is used in right context -* Tagging is we are tagging existing value with additional context, so they can't be misused - * def tag[U](i : Int) : Int @@ U = i.asInstanceOf[Int @@ U] - * Usage: val userId = tag[User](10) ---now this id could be used as UserId - * def tag[U](i : String) : String @@ U = i.asInstanceOf[Int @@ U] - * Usage: val userpassword = tag[User]("Gibrish@#$123") ---now this String could be used as password +# Scala type-system + +* Scala compiler let us treat types themselves as values to certain extent +* Type alias are supported in Scala + * ```type UserId = Int``` +* We can tag known primitive type and enforce that API is used in right context +* Tagging is we are tagging existing value with additional context, so they can't be misused + * def tag[U](i : Int) : Int @@ U = i.asInstanceOf[Int @@ U] + * Usage: val userId = tag[User](10) ---now this id could be used as UserId + * def tag[U](i : String) : String @@ U = i.asInstanceOf[Int @@ U] + * Usage: val userpassword = tag[User]("Gibrish@#$123") ---now this String could be used as password * \ No newline at end of file diff --git a/src/main/md/scala/scalaz/Monad.md b/src/main/md/scala/scalaz/Monad.md new file mode 100644 index 00000000..a0fce57f --- /dev/null +++ b/src/main/md/scala/scalaz/Monad.md @@ -0,0 +1,28 @@ +# Monad pragmatic rules (not algebric) + +* Method signature should not lie +* Method signature should be acurate +* Method signature should reflect non-determinism +* Monad helps to achieve referential transparency. + * By returning computation, side-effect or non-determinism are passed to dependent + * For example, if below method is invoked twice, both the time results are equal since it does represent computation, not values + * val readTemperaturer = (fileName) => IO(File.read(fileName)) => IO[A] +* Return type should emphasize the computation in context, not actual values + * Result may not be available due to failure, lack of result + * There could be one or more result + * Computation may never return result and still in computation mode +* No one can peek into the result from outside, rather they could pass the dependent computation to the monad. 
(Hollywood principle, Monad type would call dependent method) +* Monad also gives a way to invoke sequence dependent operation using flatMap or bind operation + + +# Implementation details +* Wrap the problem into case class and define map, and flatMap +* When one of the function dealts with multiple parameter, using curry treat them as if they are simple Function1 (A =>B) + * val nFunc: A => B => C ~~~> val nFunc2: A => (B => C) + * In above declare new type for (B=>C) + +F[A] ~~~> Functor of A +* It may return result of type A +* It shows lack of result +* It may return multiple values of type A +* It represents computation, not result diff --git a/src/main/md/scala/scalaz/ReaderApp.scala b/src/main/md/scala/scalaz/ReaderApp.scala new file mode 100644 index 00000000..71f3cee0 --- /dev/null +++ b/src/main/md/scala/scalaz/ReaderApp.scala @@ -0,0 +1,49 @@ +package scala.scalaz + +object ReaderApp extends App { + //import scalaz.Reader + + case class Reader[A](f: Config => A) { + + def map[B](g: A => B): Reader[B] = { + val reader: Config => B = { config => + val x: A = f(config) + g(x) + } + Reader(reader) + } + + def flatMap[B](g: A => Reader[B]): Reader[B] = { + val sequence: Config => B = { config => + g(f(config)).f(config) + } + Reader(sequence) + } + + def run(config: Config): A = f(config) + + } + + case class Config(batchSize: Int) + + type IntToConfig = (Int => Config) + + implicit def ReaderFun[A](f: Config => A) = Reader(f) + + //(A, B) = C ~~~ A => (B => C) + def setBatchSize(conf: Config, size: Int): Config = conf.copy(batchSize = size) + def setBatch: Config => IntToConfig = (setBatchSize _).curried + def getBatchSize(conf: Config): Int = conf.batchSize + + + def scaleBatchSize(scale: Double) = + for { + f <- Reader((setBatchSize _).curried) + s <- Reader(getBatchSize) + } yield f((scale * s).toInt) + + val newConf = scaleBatchSize(2.0).run(Config(3)) // scalaz.Id.Id[Config] + + // Recall `type Id[A] = A` so no need to unwrap anything. + println(newConf.batchSize) // Int = 6 +} \ No newline at end of file diff --git a/src/main/md/scala/scalaz/ReaderMonad.md b/src/main/md/scala/scalaz/ReaderMonad.md new file mode 100644 index 00000000..07afba72 --- /dev/null +++ b/src/main/md/scala/scalaz/ReaderMonad.md @@ -0,0 +1,6 @@ +# Reader Monad + +* Reader and Writer are just State in disguise +* Provide environment single time + +* (https://github.com/LannyRipple/using-scalaz/wiki/Monad-Reader) diff --git a/src/main/md/scala/spark_streaming.md b/src/main/md/scala/spark_streaming.md new file mode 100644 index 00000000..e9a2aac8 --- /dev/null +++ b/src/main/md/scala/spark_streaming.md @@ -0,0 +1,14 @@ +# Streaming +* Batch as special case of streaming +* How to handle/collect errors in spark streaming? 
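A minimal sketch of the "batch as special case of streaming" bullet, in the style of the linked Structured Streaming guide; the socket source, host, and port are assumptions for illustration, and the open question about collecting errors is not addressed here.
```Scala
import org.apache.spark.sql.SparkSession

object StreamingWordCount extends App {
  val spark = SparkSession.builder.appName("streaming-sketch").master("local[*]").getOrCreate()
  import spark.implicits._

  // Same DataFrame/Dataset API as batch; only readStream/writeStream differ from read/write.
  val lines  = spark.readStream.format("socket").option("host", "localhost").option("port", 9999).load()
  val words  = lines.as[String].flatMap(_.split(" "))
  val counts = words.groupBy("value").count()

  val query = counts.writeStream.outputMode("complete").format("console").start()
  query.awaitTermination()
}
```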
+* + + +# References +* [Structured Streaming](https://spark.apache.org/docs/latest/structured-streaming-programming-guide.html) +* http://milinda.pathirage.org/kappa-architecture.com/ +* https://data-artisans.com/blog/batch-is-a-special-case-of-streaming + +# Notes +* around for a year - seems like factually wrong +* either CSV and JSON (should be OR) \ No newline at end of file diff --git a/src/main/md/scratch.md b/src/main/md/scratch.md new file mode 100644 index 00000000..2abf4ed7 --- /dev/null +++ b/src/main/md/scratch.md @@ -0,0 +1,9 @@ +* Stage is screen +* Sprite is a character +* + +* [Double Jumps - Invent with Scratch 2.0 Screencast](https://www.youtube.com/watch?v=oRBfjK-qeXE&list=PL0-84-yl1fUkall6a14nqzXpG79-RgI1F) +* https://www.youtube.com/watch?v=0pxaFzRtx7k +* https://www.youtube.com/watch?v=QRy1NjDhp14 +* https://www.youtube.com/watch?v=WAIKC35sYME +* https://www.youtube.com/watch?v=hg2Q9kNUchQ \ No newline at end of file diff --git a/src/main/md/startup_ideas_project/WhatToBuild.md b/src/main/md/startup_ideas_project/WhatToBuild.md new file mode 100644 index 00000000..9e70b4a6 --- /dev/null +++ b/src/main/md/startup_ideas_project/WhatToBuild.md @@ -0,0 +1,45 @@ +* What do you wish someone would make for you? +* Fix things that seem broken, regardless of whether it seems like the problem is important enough to build a company on. + * If we keep pursuing such threads it would be hard not to end up making something of value to a lot of people +* Imagine a graph whose x axis represents all the people who might want what you're making and whose y axis represents how much they want it. (compare with google.com) +* Rather than trying to learn about "entrepreneurship." "Entrepreneurship" is something you learn best by doing it +* Over time, most of the ideas fade. But a choice few keep on coming back. Pay attention to those. You know you’ve got something good when you’re thinking about it in the shower. +* Make sure you enjoy thinking about it - Your primary edge as a founder will be the number of hours you spent thinking about a specific problem. (Grind factor) +* Get in the habit of simplifying. - “Scheming mode”/“Razor mode” +* you should track the problems you have, and the ones that reoccur will be good signals to solve them. +* During the American gold rush the people who made the money were the ones selling shovels +* Grind factor : Look for small ideas that you can execute well. And then execute, not for passion or ego, but as a grind. Like a regular job. +* What task your team has to do often that is better done by a software +* "5 whys" - to find actual problem, +* There is no simple technique to "validate your market". +* “gym of entrepreneurship.” Building a startup is one of those things that you need to work to get better at over time +* "every spreadsheet is an opportunity", but trying to remove spread-sheet alone is not sufficient +* Copy a startup that got acquired last year. +* It's no coincidence that there have been many successful products that have started as internal tools written to solve internal problems +* Look at "inevitable" trends. Things that seem like they will certainly be popular or prevalent in 2 to 5 years. + * Cannabis legalization + * VR + * IOT +* Do you have friends working in different fields ? Spend more time understanding their day to day job. +* Good product ideas usually come from a deep, subtle understanding of the problem domain, not by searching for new problems in markets you don't know anything about. 
+* By attending conferences where business owners are open to discovering new solutions. + * Why not build an application where people would be ready to post a problem with some money to solve it. +* Don't come up with ideas. Find problems. +* Scanning the "need a coder" lists for product ideas, like: RentACoder.com, oDesk and Elance jobs +* Here is a company in New Jersey that browses Amazon reviews for feature requests, creates those updated products in China, and sells them on Amazon. They do hundreds of millions a year in sales. + +# According to Dijkstra +* "Raise your quality standards as high as you can live with, avoid wasting your time on routine problems, and always try to work as closely as possible at the boundary of your abilities. Do this, because it is the only way of discovering how that boundary should be moved forward." +* "We all like our work to be socially relevant and scientifically sound. If we can find a topic satisfying both desires, we are lucky; if the two targets are in conflict with each other, let the requirement of scientific soundness prevail." +* "Never tackle a problem of which you can be pretty sure that (now or in the near future) it will be tackled by others who are, in relation to that problem, at least as competent and well-equipped as you." + +# References +* [Organic Startup Ideas](http://www.paulgraham.com/organic.html) +* [How To Get Startup Ideas](http://paulgraham.com/startupideas.html) +* [Frighteningly Ambitious Startup Ideas](http://paulgraham.com/startupideas.html) +* [How To Decide What To Build](https://dcgross.com/decide-what-to-build/) +* [indiehackers](https://www.indiehackers.com/forum/all-time/page/1) +* [halfbakery](http://www.halfbakery.com/category/Food) +* [The Problem With Problems](https://medium.com/black-n-white/the-problem-with-problems-47ee63bb3511) +* [Ask HN: What problem in your industry is a potential startup?](https://news.ycombinator.com/item?id=9799007) +* https://www.helloreads.com/ diff --git a/src/main/md/startup_ideas_project/community_buy.md b/src/main/md/startup_ideas_project/community_buy.md new file mode 100644 index 00000000..eab34e11 --- /dev/null +++ b/src/main/md/startup_ideas_project/community_buy.md @@ -0,0 +1,6 @@ +* Community Group Buy - for grocery shopping +* https://drop.com/ +* Buy Together, Save Together!
+* +# Refernce +* [](https://walkthechat.com/community-group-buy-the-next-billion-dollar-e-commerce-industry/) \ No newline at end of file diff --git a/src/main/md/toNotes.md b/src/main/md/toNotes.md new file mode 100644 index 00000000..6ba5dc78 --- /dev/null +++ b/src/main/md/toNotes.md @@ -0,0 +1 @@ +* https://www.quora.com/What-are-some-quick-hacks-to-understand-any-set-of-data-in-five-minutes-given-a-random-set-of-data-and-five-minutes-to-find-a-pattern-a-conclusion-the-structure-or-a-trend?share=4ce4eab2&srid=5Zp diff --git a/src/main/md/ubunut_gui_on_win10.md b/src/main/md/ubunut_gui_on_win10.md index 13b6474c..53dbc911 100644 --- a/src/main/md/ubunut_gui_on_win10.md +++ b/src/main/md/ubunut_gui_on_win10.md @@ -1,3 +1,8 @@ +# Recover root password +* boot in recovery mode and select root +* mount -rw -remount/ +* passwd <> + echo "export DISPLAY=:0.0" >> ~/.bashrc sudo apt-get install xvfb @@ -6,4 +11,18 @@ Xvfb :0 -screen 0 1920x1080x24 +extension GLX -nolisten tcp -dpi 96 apt-get install ubuntu-desktop apt-get install unity apt-get install compiz-core -apt-get install compizconfig-settings-manager \ No newline at end of file +apt-get install compizconfig-settings-manager + + +service --status-all +service ssh --full-restart + +@/etc/ssh/sshd_config +Match User nikias +ForceCommand internal-sftp +PasswordAuthentication yes +ChrootDirectory /mnt/d/git/Thales/ThalesExcelProcess/ +PermitTunnel no +AllowAgentForwarding no +AllowTcpForwarding no +X11Forwarding no \ No newline at end of file diff --git a/src/main/python/Intro_Pandas.png b/src/main/python/Intro_Pandas.png new file mode 100644 index 00000000..2fa7f171 Binary files /dev/null and b/src/main/python/Intro_Pandas.png differ diff --git a/src/main/python/Intro_Pandas.py b/src/main/python/Intro_Pandas.py new file mode 100644 index 00000000..7006af97 --- /dev/null +++ b/src/main/python/Intro_Pandas.py @@ -0,0 +1,80 @@ + +# coding: utf-8 + +# In[3]: + + +from sklearn.datasets import load_boston +boston_house_prices_data = load_boston() + + +# In[5]: + + +print(boston_house_prices_data.DESCR) + + +# In[6]: + + +import pandas as pd +import numpy as np + +pd.Series([1,3,4,2,3]) +pd.Series([1,np.nan,4,2,None]) + + +# In[7]: + + +num_rooms = pd.Series([1,np.nan,4,2,None]) +num_rooms.isnull() + + +# In[10]: + + +num_rooms[num_rooms.notnull()] + + +# In[16]: + + +df = pd.DataFrame( + [[1,np.nan, 2], + [2,300, 5], + [1,np.nan, np.nan], + ] +) +df + + +# In[17]: + + +df.isnull() + + +# In[18]: + + +df.dropna() + + +# In[20]: + + +means = df.mean(axis=0) + + +# In[21]: + + +means + + +# In[22]: + + +df.fillna(means) + diff --git a/src/main/python/KernelDensityEstimation.py b/src/main/python/KernelDensityEstimation.py new file mode 100644 index 00000000..d865b365 --- /dev/null +++ b/src/main/python/KernelDensityEstimation.py @@ -0,0 +1,25 @@ +from sklearn.datasets import load_iris +from sklearn.neighbors import KernelDensity +import matplotlib.pyplot as pyplot +import numpy as np + +iris = load_iris() + +X = iris.data + + + +#sepal with (cm) +feature = X[:, 1] +pyplot.hist(feature, bins=30, normed=True) +pyplot.show() + +plot_space = np.linspace(min(feature), max(feature), 1000) + +kde = KernelDensity(bandwidth=0.05, kernel='gaussian') +kde.fit(feature.reshape(-1,1)) +densities = np.exp(kde.score_samples(plot_space.reshape(-1,1))) + +pyplot.bar(plot_space, densities) +pyplot.ylim(min(densities)-1, max(densities) +1) +pyplot.show() \ No newline at end of file diff --git a/src/main/python/Polynomial_regression.py 
b/src/main/python/Polynomial_regression.py new file mode 100644 index 00000000..1dab72b4 --- /dev/null +++ b/src/main/python/Polynomial_regression.py @@ -0,0 +1,20 @@ +from sklearn.preprocessing import PolynomialFeatures +import numpy as np + +X = np.arange(4).reshape(2,2) +X + +poly = PolynomialFeatures(degree=2) +poly.fit_transform(X) + +from sklearn.linear_model import LinearRegression +from sklearn.pipeline import Pipeline +model = Pipeline([('poly', PolynomialFeatures(degree=3)), ('linear', LinearRegression(fit_intercept=False))]) + +x = np.arange(5) +y = 3 - 2 * x + x ** 2 - x ** 3 +np.stack([x,y]) + +model.fit(x[:,None],y) + +model.named_steps['linear'].coef_ \ No newline at end of file diff --git a/src/main/python/Regularization.py b/src/main/python/Regularization.py new file mode 100644 index 00000000..c2703263 --- /dev/null +++ b/src/main/python/Regularization.py @@ -0,0 +1,35 @@ +from sklearn.linear_model import LinearRegression +from sklearn.pipeline import Pipeline +from sklearn.preprocessing import PolynomialFeatures +from sklearn.model_selection import train_test_split +from sklearn import linear_model +import matplotlib.pyplot as pyplot +from pprint import pprint as _p + +X = boston_data.data +y = boston_data.target +X_train, X_test, y_train, Y_test = train_test_split(X,y,random_state=42) + +model = linear_model.LinearRegression() +model.fit(X_train, y_train) + +for coef, label in zip(model.coef_, boston_data.feature_names): + print("{:10.4f}".format(coef), label) +print("\n"*2) + +model = linear_model.Ridge(alpha=1) +model.fit(X_train, y_train) +for coef, label in zip(model.coef_, boston_data.feature_names): + print("{:10.4f}".format(coef), label) +print("\n"*2) + +model = linear_model.Lasso() +model.fit(X_train, y_train) +for coef, label in zip(model.coef_, boston_data.feature_names): + print("{:10.4f}".format(coef), label) +print("\n"*2) + +model = linear_model.Lasso(alpha=0.1) +model.fit(X_train, y_train) +for coef, label in zip(model.coef_, boston_data.feature_names): + print("{:10.4f}".format(coef), label) diff --git a/src/main/python/StandardizationNormalization.py.pdf b/src/main/python/StandardizationNormalization.py.pdf new file mode 100644 index 00000000..3e4d9d41 Binary files /dev/null and b/src/main/python/StandardizationNormalization.py.pdf differ diff --git a/src/main/python/StandardizationNormalization.py.py b/src/main/python/StandardizationNormalization.py.py new file mode 100644 index 00000000..8ec3c105 --- /dev/null +++ b/src/main/python/StandardizationNormalization.py.py @@ -0,0 +1,26 @@ + +# coding: utf-8 + +# In[6]: + + +data = [ + [0,0],[0,0], + [1,1],[1,1] +] +data + + +# In[4]: + + +from sklearn.preprocessing import StandardScaler +StandardScaler().fit_transform(data) + + +# In[7]: + + +from sklearn import preprocessing +preprocessing.normalize(data, norm='l2') + diff --git a/src/main/python/UnSupervised_DecisionTree.py b/src/main/python/UnSupervised_DecisionTree.py new file mode 100644 index 00000000..13e62ab4 --- /dev/null +++ b/src/main/python/UnSupervised_DecisionTree.py @@ -0,0 +1,28 @@ +from sklearn.datasets import load_iris +from sklearn.tree import DecisionTreeClassifier +from sklearn.model_selection import train_test_split + +iris_data = load_iris() +X = iris_data.data +y = iris_data.target +X_train, X_test, y_train, Y_test = train_test_split(X,y,random_state=42) + +model = DecisionTreeClassifier(max_leaf_nodes=3, random_state=0) +model.fit(X_train, y_train) + +from sklearn.tree import _tree + +def find_rules(tree, features): + dt = 
tree.tree_ + def visitor(node, depth): + indent = ' ' * depth + if dt.feature[node] != _tree.TREE_UNDEFINED: + print('{} if <{}> <= {}:'.format(indent, features[node], round(dt.threshold[node], 2))) + visitor(dt.children_left[node], depth + 1) + print('{} else:'.format(indent)) + visitor(dt.children_right[node], depth + 1) + else: + print('{} return {}'.format(indent, dt.value[node])) + visitor(0,1) + +find_rules(model, iris_data.feature_names) diff --git a/src/main/python/UnSupervised_DecisionTree.py.pdf b/src/main/python/UnSupervised_DecisionTree.py.pdf new file mode 100644 index 00000000..8f0fe68f Binary files /dev/null and b/src/main/python/UnSupervised_DecisionTree.py.pdf differ diff --git a/src/main/python/UnSupervised_KMeans.py b/src/main/python/UnSupervised_KMeans.py new file mode 100644 index 00000000..8cf1d5f8 --- /dev/null +++ b/src/main/python/UnSupervised_KMeans.py @@ -0,0 +1,25 @@ +import seaborn as sns +from sklearn.datasets import load_iris +from sklearn.cluster import KMeans + +import matplotlib.pyplot as pyplot +from mpl_toolkits.mplot3d import Axes3D + +iris = load_iris() +X = iris.data +y = iris.target + +model = KMeans(n_clusters=3) +model.fit(X) +labels = model.labels_ +labels + +# In[10]: +fig = pyplot.figure(1, figsize=(10,10)) +ax = Axes3D(fig, rect=[0,0,.95,1], elev=48, azim=134) + +ax.scatter(X[:, 3], X[:, 0], X[:, 2], c=labels.astype(np.float), edgecolor='k') + +ax.set_xlabel('Petal width') +ax.set_ylabel('Sepal length') +ax.set_zlabel('Petal length') \ No newline at end of file diff --git a/src/main/python/Unsupervised_SVM.py b/src/main/python/Unsupervised_SVM.py new file mode 100644 index 00000000..97b27285 --- /dev/null +++ b/src/main/python/Unsupervised_SVM.py @@ -0,0 +1,31 @@ +from sklearn.datasets import load_boston +from sklearn.model_selection import train_test_split + +boston_data = load_boston() + +X = boston_data.data +y = boston_data.target +X_train, X_test, y_train, Y_test = train_test_split(X,y,random_state=42) + + +from sklearn import svm, linear_model +from sklearn.metrics import mean_squared_error + + +model = svm.LinearSVR(random_state=42) +model.fit(X_train, y_train) +score = model.score(X_test, Y_test) +print(score) + + +model = svm.SVR() +model.fit(X_train, y_train) +score = model.score(X_test, Y_test) +print(score) + + +model = linear_model.LinearRegression() +model.fit(X_train, y_train) +y_pred = model.predict(X_test) +score = model.score(X_test, Y_test) +print(score) \ No newline at end of file diff --git a/src/main/python/boston_houseprice_prediction.pdf b/src/main/python/boston_houseprice_prediction.pdf new file mode 100644 index 00000000..cedd3aa7 Binary files /dev/null and b/src/main/python/boston_houseprice_prediction.pdf differ diff --git a/src/main/python/boston_houseprice_prediction.py b/src/main/python/boston_houseprice_prediction.py new file mode 100644 index 00000000..5fff3126 --- /dev/null +++ b/src/main/python/boston_houseprice_prediction.py @@ -0,0 +1,58 @@ +import pandas as pd +import numpy as np +import matplotlib.pyplot as pyplot +import seaborn as sns +from sklearn.datasets import load_boston +from sklearn.metrics import mean_squared_error + +boston_data = load_boston() +print(boston_data.DESCR) +X = boston_data.data +y = boston_data.target + +X_df = pd.DataFrame(X, columns=boston_data.feature_names) +X_df.head(5) + +len(y) + +sns.distplot(y) +pyplot.show() + +y_pred = [0] * 506 +error = mean_squared_error(y, y_pred) +print(error) + +sns.jointplot(X[:, 5],y) + +def manual_model(house): + return (house[5] - 4) * 10 + 
+y_pred = [manual_model(x) for x in X] +error = mean_squared_error(y, y_pred) +print(error) + +sns.jointplot(X[:, 5],y) + +from sklearn.linear_model import LinearRegression +from sklearn.model_selection import train_test_split + +model = LinearRegression() +X_train, X_test, y_train, y_test = train_test_split(X,y,test_size=0.33, random_state=42) +model.fit(X_train, y_train) +y_pred_ml = model.predict(X_test) +error = mean_squared_error(y_test, y_pred_ml) +print(error) + +print('-----------------') +print('-----------------') +print('-----------------') + + +model2 = LinearRegression(normalize=True) +model2.fit(X_train, y_train) +y_pred_ml_normalized = model2.predict(X_test) +error = mean_squared_error(y_test, y_pred_ml_normalized) +print(error) + + + diff --git a/src/main/python/de_deduplicated.py b/src/main/python/de_deduplicated.py new file mode 100644 index 00000000..58c0d17c --- /dev/null +++ b/src/main/python/de_deduplicated.py @@ -0,0 +1,43 @@ + +# coding: utf-8 + +# In[2]: + + +import pandas as pd +from sklearn.datasets import load_boston +boston_house_prices_data = load_boston() +X = pd.DataFrame(boston_house_prices_data.data) + + +# In[3]: + + +X[X.duplicated()] + + +# In[6]: + + +X = X.append(X.iloc[0,:], ignore_index=True) + + +# In[7]: + + +X[X.duplicated()] + + +# In[15]: + + +X.iloc[[2,3,4],:] +X.iloc[[2,3,4],3] + + +# In[16]: + + +X=X.drop_duplicates() +X[X.duplicated()] + diff --git a/src/main/python/de_deuplicated.pdf b/src/main/python/de_deuplicated.pdf new file mode 100644 index 00000000..ac4edb1f Binary files /dev/null and b/src/main/python/de_deuplicated.pdf differ diff --git a/src/main/python/logistics_regression.py b/src/main/python/logistics_regression.py new file mode 100644 index 00000000..02781005 --- /dev/null +++ b/src/main/python/logistics_regression.py @@ -0,0 +1,110 @@ + +# coding: utf-8 + +# In[5]: + + +from sklearn.datasets import load_iris +import pandas as pd + +iris_data = load_iris() +print(iris_data.DESCR) + + +# In[7]: + + +X = iris_data.data +X_df = pd.DataFrame(X, columns=iris_data.feature_names) +X_df + + +# In[10]: + + +X_df['flower_type']=iris_data.target #append the target column, or Y +X_df.groupby('flower_type').mean() + + +# In[12]: + + +X_df.groupby('flower_type').max() + + +# In[13]: + + +X_df.groupby('flower_type').min() + + +# In[47]: + + +from sklearn.linear_model import LogisticRegression +model = LogisticRegression(multi_class='ovr', solver='newton-cg') + + +# In[58]: + + +from sklearn.model_selection import train_test_split +y = iris_data.target +X_train, X_test, y_train, y_test = train_test_split(X,y, test_size=0.33,random_state=65) + + +# In[59]: + + +y_test.shape + + +# In[60]: + + +model.fit(X_train,y_train) + + +# In[61]: + + +model.predict(X_train[0].reshape(1,-1)) + + +# In[62]: + + +y_train[0] + + +# In[63]: + + +model.predict(X_test) + + +# In[64]: + + +y_test + + +# In[65]: + + +model.predict(X_test) == y_test + + +# In[66]: + + +y_pred = model.predict(X_test) +model.score(X_test,y_test) + + +# In[67]: + + +from sklearn.metrics import classification_report +print(classification_report(y_pred=y_pred, y_true=y_test)) + diff --git a/src/main/python/logistics_regression_v1.pdf b/src/main/python/logistics_regression_v1.pdf new file mode 100644 index 00000000..b8436499 Binary files /dev/null and b/src/main/python/logistics_regression_v1.pdf differ diff --git a/src/main/python/logistics_regression_v2.pdf b/src/main/python/logistics_regression_v2.pdf new file mode 100644 index 00000000..76b1499c Binary files /dev/null and 
b/src/main/python/logistics_regression_v2.pdf differ diff --git a/src/main/python/principle_component_analysis_pca.py b/src/main/python/principle_component_analysis_pca.py new file mode 100644 index 00000000..73f482bf --- /dev/null +++ b/src/main/python/principle_component_analysis_pca.py @@ -0,0 +1,34 @@ +from mpl_toolkits.mplot3d import Axes3D +from sklearn.cluster import KMeans +from sklearn.datasets import load_boston +from sklearn.datasets import load_iris +from sklearn.decomposition import PCA + +from sklearn import preprocessing +import matplotlib.pyplot as pyplot +import pandas as pd +import seaborn as sns +import numpy as np + +iris = load_iris() + +X = iris.data + +pca = PCA(n_components=3) +pca.fit(X) +X_pca = pca.transform(X) +X_pca_2 = pca.fit_transform(X) + +print(X_pca) + +print("\n"*5) +for pc in pca.components_: + _p(pc) + +print("\n"*5) +from pprint import pprint as _p +for pc in pca.components_: + _p(list(zip(iris.feature_names, pc))) + print('') + +pca.explained_variance_ratio_ \ No newline at end of file diff --git a/src/main/python/renameFileEntry.py b/src/main/python/renameFileEntry.py new file mode 100644 index 00000000..33d6f9e2 --- /dev/null +++ b/src/main/python/renameFileEntry.py @@ -0,0 +1,30 @@ +import os +import zipfile + +source_dir="D:/temp/" +target_dir = 'D:/temp/target' + +# Are you sure your files names are capitalized in your zip files? +target_filelist = ['test.txt'] + +def unzip_and_append_zipname(source_dir, filelist): + for item in os.listdir(source_dir): # loop through items in dir + if item.endswith(".zip"): # check for ".zip" extension + zip_file_name = item + zip_file_name_without_ext = os.path.splitext(item)[0] + append_to_target= os.path.splitext(item)[0] + file_path = os.path.join(source_dir, item) # get zip file path + print("current zip file >> " + file_path) + with zipfile.ZipFile(file_path) as zf: # open the zip file + for target_file in filelist: # loop through the list of files to extract + if target_file in zf.namelist(): # check if the file exists in the archive + # generate the desired output name: + target_name = zip_file_name_without_ext + "_" + target_file + target_path = os.path.join(target_dir, target_name) # output path + print("Writing output into file >> " + target_path) + with open(target_path, "wb") as f: # open the output path for writing + f.write(zf.read(target_file)) # save the contents of the file in it + # next file from the list... + # next zip file... + +unzip_and_append_zipname(source_dir, target_filelist) diff --git a/src/main/python/summarize_text.py b/src/main/python/summarize_text.py new file mode 100644 index 00000000..8ecf33cd --- /dev/null +++ b/src/main/python/summarize_text.py @@ -0,0 +1,120 @@ +#importing libraries +from nltk.corpus import stopwords +from nltk.stem import PorterStemmer +from nltk.tokenize import word_tokenize, sent_tokenize +import bs4 as BeautifulSoup +import urllib.request + +# import nltk +# nltk.download('stopwords') + + +#fetching the content from the URL +fetched_data = urllib.request.urlopen('https://en.wikipedia.org/wiki/World_War_II') + +article_read = fetched_data.read() + +#parsing the URL content and storing in a variable +article_parsed = BeautifulSoup.BeautifulSoup(article_read,'html.parser') + +#returning
<p>
tags +paragraphs = article_parsed.find_all('p') + +article_content = '' + +#looping through the paragraphs and adding them to the variable +for p in paragraphs: + article_content += p.text + + +def _create_dictionary_table(text_string) -> dict: + + #removing stop words + stop_words = set(stopwords.words("english")) + + words = word_tokenize(text_string) + + #reducing words to their root form + stem = PorterStemmer() + + #creating dictionary for the word frequency table + frequency_table = dict() + for wd in words: + wd = stem.stem(wd) + if wd in stop_words: + continue + if wd in frequency_table: + frequency_table[wd] += 1 + else: + frequency_table[wd] = 1 + + return frequency_table + + +def _calculate_sentence_scores(sentences, frequency_table) -> dict: + + #algorithm for scoring a sentence by its words + sentence_weight = dict() + + for sentence in sentences: + sentence_wordcount = (len(word_tokenize(sentence))) + sentence_wordcount_without_stop_words = 0 + for word_weight in frequency_table: + if word_weight in sentence.lower(): + sentence_wordcount_without_stop_words += 1 + if sentence[:7] in sentence_weight: + sentence_weight[sentence[:7]] += frequency_table[word_weight] + else: + sentence_weight[sentence[:7]] = frequency_table[word_weight] + + sentence_weight[sentence[:7]] = sentence_weight[sentence[:7]] / sentence_wordcount_without_stop_words + + + + return sentence_weight + +def _calculate_average_score(sentence_weight) -> int: + + #calculating the average score for the sentences + sum_values = 0 + for entry in sentence_weight: + sum_values += sentence_weight[entry] + + #getting sentence average value from source text + average_score = (sum_values / len(sentence_weight)) + + return average_score + +def _get_article_summary(sentences, sentence_weight, threshold): + sentence_counter = 0 + article_summary = '' + + for sentence in sentences: + if sentence[:7] in sentence_weight and sentence_weight[sentence[:7]] >= (threshold): + article_summary += " " + sentence + sentence_counter += 1 + + return article_summary + +def _run_article_summary(article): + + #creating a dictionary for the word frequency table + frequency_table = _create_dictionary_table(article) + + #tokenizing the sentences + sentences = sent_tokenize(article) + + #algorithm for scoring a sentence by its words + sentence_scores = _calculate_sentence_scores(sentences, frequency_table) + + #getting the threshold + threshold = _calculate_average_score(sentence_scores) + + #producing the summary + article_summary = _get_article_summary(sentences, sentence_scores, 1.5 * threshold) + + return article_summary + +if __name__ == '__main__': + summary_results = _run_article_summary(article_content) + print(summary_results) \ No newline at end of file diff --git a/src/main/resources/conf/logstash/apache_logstash.conf b/src/main/resources/conf/logstash/apache_logstash.conf new file mode 100644 index 00000000..b6b5fc59 --- /dev/null +++ b/src/main/resources/conf/logstash/apache_logstash.conf @@ -0,0 +1,32 @@ +input { + stdin {} +} + +filter { + grok { + match => { + "message" => '%{IPORHOST:clientip} %{USER:ident} %{USER:auth} \[%{HTTPDATE:timestamp}\] "%{WORD:verb} %{DATA:request} HTTP/%{NUMBER:httpversion}" %{NUMBER:response:int} (?:-|%{NUMBER:bytes:int}) %{QS:referrer} %{QS:agent}' + } +} + +geoip { + source => clientip + target => geoip +} + +useragent { + source => agent + target => useragent + } + + date { + match => [ "timestamp", "dd/MMM/YYYY:HH:mm:ss Z" ] + locale => en + } +} + +output { + stdout { + codec => dots + } +} \ No 
newline at end of file diff --git a/src/main/resources/conf/logstash/auto_cars_csv_logstash.conf b/src/main/resources/conf/logstash/auto_cars_csv_logstash.conf new file mode 100644 index 00000000..7aceafc5 --- /dev/null +++ b/src/main/resources/conf/logstash/auto_cars_csv_logstash.conf @@ -0,0 +1,28 @@ +input { + file { + path => "/home/nikias/Downloades/auto_cars.csv" + start_position => "begining" + sincedb_path => "/dev/null" + } +} + +filter { + csv { + separator => "," + columns => ["maker", "model", "mileage", "manufacture_year", "engine_displacement", "engine_power", "body_type", "color_slug", "stk_year", "transmission", "door_count", "seat_count", "fuel_type", "date_created", "date_last_year", "price_eur"] + } + mutate {convert => ["mutate", "integer"]} + mutate {convert => ["price_eur", "float"]} + mutate {convert => ["engine_power", "integer"]} + mutate {convert => ["door_count", "integer"]} + mutate {convert => ["seat_count", "integer"]} +} + +output { + elasticsearch { + hosts => "localhost" + index => "cars" + document_type => "sold_cars" + } + stdout {} +} \ No newline at end of file diff --git a/src/main/resources/jobs/myworkdayjobs_results.json b/src/main/resources/jobs/myworkdayjobs_results.json new file mode 100644 index 00000000..91344bd7 --- /dev/null +++ b/src/main/resources/jobs/myworkdayjobs_results.json @@ -0,0 +1,8780 @@ +{ + "id": "210073856", + "widget": "root", + "body": + { + "id": "210073857", + "label": "Search for Jobs", + "widget": "fieldSet", + "children": + [ + { + "id": "210073938", + "widget": "facetSearchResult", + "ecid": "facetSearchResult", + "facetContainer": + { + "id": "210073859", + "widget": "facetContainer", + "ecid": "facetContainer.newFacetSearch.FacetEntry", + "searchText": + { + "widget": "text" + }, + + "paginationCount": + { + "id": "210073860", + "widget": "number", + "ecid": "number.paginationCount", + "value": 866, + "totalDigits": 0, + "precision": 0 + }, + + "offset": + { + "id": "210073861", + "widget": "number", + "ecid": "number.offset", + "value": 0, + "totalDigits": 0, + "precision": 0 + }, + + "facets": + [ + { + "id": "210073862", + "label": "Job Category", + "widget": "facet", + "ecid": "facet.facet.FacetParameter", + "iid": "jobFamilyGroup", + "facetValues": + [ + { + "id": "210073863", + "label": "Sales", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "591af8b812fa10737b29d343f646ef95", + "count": 387 + }, + + { + "id": "210073864", + "label": "Engineering", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "591af8b812fa10737af39db3d96eed9f", + "count": 305 + }, + + { + "id": "210073865", + "label": "Marketing", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "591af8b812fa10737b21f9f23c2eef58", + "count": 50 + }, + + { + "id": "210073866", + "label": "Contingent Worker", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100b37463d557f327785", + "count": 27 + }, + + { + "id": "210073867", + "label": "Information Technology", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "591af8b812fa10737b0e880e0e3eeee9", + "count": 21 + }, + + { + "id": "210073868", + "label": "Customer Experience", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "591af8b812fa10737ad9bb6642feecb7", + "count": 21 + }, + + 
{ + "id": "210073869", + "label": "Finance & Operations", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "591af8b812fa10737b00b5059406ee76", + "count": 17 + }, + + { + "id": "210073870", + "label": "Employee Experience", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "591af8b812fa10737ae25403419eecd0", + "count": 11 + }, + + { + "id": "210073871", + "label": "Legal", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "591af8b812fa10737b171b99fa16ef27", + "count": 9 + }, + + { + "id": "210073872", + "label": "Administration", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "591af8b812fa10737acd8e5705b6ec42", + "count": 6 + }, + + { + "id": "210073873", + "label": "University", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "591af8b812fa10737b43a1662896f01c", + "count": 5 + } + ], + + "expandState": "" + }, + + { + "id": "210073874", + "label": "Locations", + "widget": "facet", + "ecid": "facet.facet.FacetParameter", + "iid": "locations", + "facetValues": + [ + { + "id": "210073875", + "label": "San Jose", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84cada7a9e46d2e", + "count": 256 + }, + + { + "id": "210073876", + "label": "San Francisco", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84cb08fd7b86d33", + "count": 210 + }, + + { + "id": "210073877", + "label": "New York", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84ae98df6f36c0b", + "count": 94 + }, + + { + "id": "210073878", + "label": "Lehi", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84cc1f4dd686d51", + "count": 63 + }, + + { + "id": "210073879", + "label": "Seattle", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84caac741df6d29", + "count": 55 + }, + + { + "id": "210073880", + "label": "McLean", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84a7a29c9c46b52", + "count": 39 + }, + + { + "id": "210073881", + "label": "London", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84aaa846b246b9d", + "count": 39 + }, + + { + "id": "210073882", + "label": "Noida", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84aa7a3f1e46b98", + "count": 38 + }, + + { + "id": "210073883", + "label": "Boston", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84b378a43226c97", + "count": 37 + }, + + { + "id": "210073884", + "label": "Bangalore", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84a8e6d38d46b70", + "count": 36 + }, + + { + "id": "210073885", + "label": "Tokyo", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84d2327a08e6dec", + "count": 34 + }, + + { + "id": "210073886", + 
"label": "Bucharest", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84ad31e29e16be3", + "count": 34 + }, + + { + "id": "210073887", + "label": "Maidenhead", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84dc3db0de26ef0", + "count": 31 + }, + + { + "id": "210073888", + "label": "Emeryville", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "c5c6352929d2108d37b2418831e3a732", + "count": 29 + }, + + { + "id": "210073889", + "label": "Chicago", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84ae6db79016c06", + "count": 29 + }, + + { + "id": "210073890", + "label": "Munich", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84b100f50d56c51", + "count": 27 + }, + + { + "id": "210073891", + "label": "Remote California", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84a9f3a98716b89", + "count": 25 + }, + + { + "id": "210073892", + "label": "Remote Texas", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84c991cf5176d0b", + "count": 24 + }, + + { + "id": "210073893", + "label": "Remote New York", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84b59bbd5aa6cd3", + "count": 22 + }, + + { + "id": "210073894", + "label": "Paris", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84b485303ed6cb5", + "count": 21 + }, + + { + "id": "210073895", + "label": "Sydney", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84ca4ffde7e6d1f", + "count": 19 + }, + + { + "id": "210073896", + "label": "Washington D.C.", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84d03f5b5006dbf", + "count": 18 + }, + + { + "id": "210073897", + "label": "Dublin", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84a9bea13b66b84", + "count": 17 + }, + + { + "id": "210073898", + "label": "Remote Illinois", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84ade297a9e6bf7", + "count": 15 + }, + + { + "id": "210073899", + "label": "Singapore", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84d1a405ef16ddd", + "count": 12 + }, + + { + "id": "210073900", + "label": "Remote Georgia", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84cbc28d0e96d47", + "count": 12 + }, + + { + "id": "210073901", + "label": "Remote Colorado", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84ab98c6e026bb6", + "count": 12 + }, + + { + "id": "210073902", + "label": "Remote Bavaria", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84ccaa15cac6d60", + "count": 12 + 
}, + + { + "id": "210073903", + "label": "Toronto", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "c5c6352929d2108d37bf7e47d10aa7c5", + "count": 11 + }, + + { + "id": "210073904", + "label": "Remote New Jersey", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84af4caea326c1f", + "count": 11 + }, + + { + "id": "210073905", + "label": "Ottawa", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84acabe89c36bd4", + "count": 11 + }, + + { + "id": "210073906", + "label": "Remote Massachusetts", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84b1b2837386c65", + "count": 10 + }, + + { + "id": "210073907", + "label": "Stockholm", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84d202253936de7", + "count": 8 + }, + + { + "id": "210073908", + "label": "Remote Pennsylvania", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84c9f2b25506d15", + "count": 8 + }, + + { + "id": "210073909", + "label": "Minneapolis", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84abf3e01756bc0", + "count": 8 + }, + + { + "id": "210073910", + "label": "Remote Virginia", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84b6b69595f6cec", + "count": 7 + }, + + { + "id": "210073911", + "label": "Remote North Carolina", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84adb626ed56bf2", + "count": 7 + }, + + { + "id": "210073912", + "label": "Remote Maryland", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84b1e08b71f6c6a", + "count": 6 + }, + + { + "id": "210073913", + "label": "Remote Connecticut", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84ad05d3d1e6bde", + "count": 6 + }, + + { + "id": "210073914", + "label": "Seoul", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84ca7e048d96d24", + "count": 4 + }, + + { + "id": "210073915", + "label": "Amsterdam", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84d49a246ad6e2d", + "count": 4 + }, + + { + "id": "210073916", + "label": "Rome", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84ca235d1e96d1a", + "count": 3 + }, + + { + "id": "210073917", + "label": "Remote India", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84b027718b86c38", + "count": 3 + }, + + { + "id": "210073918", + "label": "Madrid", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84ac20cc3806bc5", + "count": 3 + }, + + { + "id": "210073919", + "label": "Edinburgh", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": 
"3ba4ecdf4893100bc84ae0da81726bfc", + "count": 3 + }, + + { + "id": "210073920", + "label": "Berlin", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "eb75665d31e4106294846e29ef4482f6", + "count": 3 + }, + + { + "id": "210073921", + "label": "Zurich", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84d26374bca6df1", + "count": 2 + }, + + { + "id": "210073922", + "label": "Shanghai", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "c5c6352929d2108d386898252080b069", + "count": 2 + }, + + { + "id": "210073923", + "label": "Remote Utah", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84b76b36f6e6d00", + "count": 2 + }, + + { + "id": "210073924", + "label": "Remote Netherlands", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "d9675dc7168d100e5e7100e776135b25", + "count": 2 + }, + + { + "id": "210073925", + "label": "Remote Michigan", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84b458c35c16cb0", + "count": 2 + }, + + { + "id": "210073926", + "label": "Remote Canada", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84d293675816df6", + "count": 2 + }, + + { + "id": "210073927", + "label": "Remote Australia", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84af77f56726c24", + "count": 2 + }, + + { + "id": "210073928", + "label": "Detroit", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "c5c6352929d2108d383d24b260e9b004", + "count": 2 + }, + + { + "id": "210073929", + "label": "Basel", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100bc84d8c22e3c36e9b", + "count": 2 + } + ], + + "expandState": "" + }, + + { + "id": "210073930", + "label": "Job Type", + "widget": "facet", + "ecid": "facet.facet.FacetParameter", + "iid": "workerSubType", + "facetValues": + [ + { + "id": "210073931", + "label": "Regular", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100b2f8d06b0870c6c8b", + "count": 833 + }, + + { + "id": "210073932", + "label": "Adobe Paid Temp (Fixed Term)", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100b2f8d07bcad316c8d", + "count": 17 + }, + + { + "id": "210073933", + "label": "Agency Temp", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100b2f8f1fda22686c8f", + "count": 9 + }, + + { + "id": "210073934", + "label": "Intern (Fixed Term)", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100b2f8d08d56d8d6c8e", + "count": 6 + }, + + { + "id": "210073935", + "label": "Independent Contractor - Badge + Network", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "3ba4ecdf4893100b2f8f22cd3aae6c93", + "count": 1 + } + ], + + "expandState": "" + }, + + { + "id": "210073936", + "label": "Full/Part-time", + "widget": "facet", + 
"ecid": "facet.facet.FacetParameter", + "iid": "timeType", + "facetValues": + [ + { + "id": "210073937", + "label": "Full time", + "widget": "facetValue", + "ecid": "facetValue.facetValue['FacetInstance_Reference_-_WID']", + "iid": "262714769a02100a80d2a64ac4e040c0", + "count": 866 + } + ], + + "expandState": "" + } + ] + }, + + "children": + [ + { + "id": "210073945", + "widget": "facetSearchResultList", + "ecid": "facetSearchResultList.newFacetSearch.Report_Entry", + "listItems": + [ + { + "id": "210073948", + "widget": "templatedListItem", + "ecid": "templatedListItem.210073946", + "title": + { + "id": "210073946", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210073947", + "widget": "moniker", + "text": "Microsoft Dynamics Developer, Customer Experience Tech Services", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Jose/Microsoft-Dynamics-Developer--Customer-Experience-Tech-Services_59889", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210073949", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210073950", + "widget": "moniker", + "text": "San Jose" + } + ], + + "multiSelect": true + }, + + { + "id": "210073951", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210073952", + "widget": "moniker", + "text": "59889" + } + ], + + "multiSelect": true + }, + + { + "id": "210073953", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210073954", + "widget": "moniker", + "text": "Posted 8 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210073957", + "widget": "templatedListItem", + "ecid": "templatedListItem.210073955", + "title": + { + "id": "210073955", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210073956", + "widget": "moniker", + "text": "Customer Success Engineer", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Sydney/Customer-Success-Engineer_58388", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210073958", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210073959", + "widget": "moniker", + "text": "Sydney" + } + ], + + "multiSelect": true + }, + + { + "id": "210073960", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210073961", + "widget": "moniker", + "text": "58388" + } + ], + + "multiSelect": true + }, + + { + "id": "210073962", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210073963", + "widget": "moniker", + "text": "Posted 21 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210073966", + "widget": "templatedListItem", + "ecid": "templatedListItem.210073964", + "title": + { + "id": "210073964", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210073965", + "widget": "moniker", + "text": "Senior Business Consultant", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Remote-China/Senior-Business-Consultant_58661", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210073967", + "widget": "monikerList", + 
"ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210073968", + "widget": "moniker", + "text": "Remote China" + } + ], + + "multiSelect": true + }, + + { + "id": "210073969", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210073970", + "widget": "moniker", + "text": "58661" + } + ], + + "multiSelect": true + }, + + { + "id": "210073971", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210073972", + "widget": "moniker", + "text": "Posted 23 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210073975", + "widget": "templatedListItem", + "ecid": "templatedListItem.210073973", + "title": + { + "id": "210073973", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210073974", + "widget": "moniker", + "text": "Strategic Account Executive - Insurance Industry", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Munich/Strategic-Account-Executive---Insurance-Industry_58935-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210073976", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210073977", + "widget": "moniker", + "text": "Munich, More..." + } + ], + + "multiSelect": true + }, + + { + "id": "210073978", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210073979", + "widget": "moniker", + "text": "58935" + } + ], + + "multiSelect": true + }, + + { + "id": "210073980", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210073981", + "widget": "moniker", + "text": "Posted 24 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210073984", + "widget": "templatedListItem", + "ecid": "templatedListItem.210073982", + "title": + { + "id": "210073982", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210073983", + "widget": "moniker", + "text": "Solution Architect", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Sydney/Solution-Architect_58231", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210073985", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210073986", + "widget": "moniker", + "text": "Sydney" + } + ], + + "multiSelect": true + }, + + { + "id": "210073987", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210073988", + "widget": "moniker", + "text": "58231" + } + ], + + "multiSelect": true + }, + + { + "id": "210073989", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210073990", + "widget": "moniker", + "text": "Posted 24 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210073993", + "widget": "templatedListItem", + "ecid": "templatedListItem.210073991", + "title": + { + "id": "210073991", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210073992", + "widget": "moniker", + "text": "Senior Software Engineer, Mobile", + "action": "GET", + "v": true + } + ], + + "commandLink": 
"/en-US/external_experienced/job/San-Jose/Software-Engineer--Mobile-iOS-Development_56944", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210073994", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210073995", + "widget": "moniker", + "text": "San Jose" + } + ], + + "multiSelect": true + }, + + { + "id": "210073996", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210073997", + "widget": "moniker", + "text": "56944" + } + ], + + "multiSelect": true + }, + + { + "id": "210073998", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210073999", + "widget": "moniker", + "text": "Posted 30+ Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074002", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074000", + "title": + { + "id": "210074000", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074001", + "widget": "moniker", + "text": "Professional Services Sales - Federal", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Remote-Australia/Professional-Services-Sales---Federal_58228-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074003", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074004", + "widget": "moniker", + "text": "Remote Australia" + } + ], + + "multiSelect": true + }, + + { + "id": "210074005", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074006", + "widget": "moniker", + "text": "58228" + } + ], + + "multiSelect": true + }, + + { + "id": "210074007", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074008", + "widget": "moniker", + "text": "Posted 30+ Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074011", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074009", + "title": + { + "id": "210074009", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074010", + "widget": "moniker", + "text": "Professional Services Sales - State Gov", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Sydney/Professional-Services-Sales---State-Gov_58229", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074012", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074013", + "widget": "moniker", + "text": "Sydney" + } + ], + + "multiSelect": true + }, + + { + "id": "210074014", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074015", + "widget": "moniker", + "text": "58229" + } + ], + + "multiSelect": true + }, + + { + "id": "210074016", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074017", + "widget": "moniker", + "text": "Posted 30+ Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074020", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074018", + "title": + { + "id": "210074018", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { 
+ "id": "210074019", + "widget": "moniker", + "text": "Senior Solution Consultant - Adobe Experience Cloud", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Singapore/Senior-Solution-Consultant---Adobe-Experience-Cloud_58654-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074021", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074022", + "widget": "moniker", + "text": "Singapore" + } + ], + + "multiSelect": true + }, + + { + "id": "210074023", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074024", + "widget": "moniker", + "text": "58654" + } + ], + + "multiSelect": true + }, + + { + "id": "210074025", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074026", + "widget": "moniker", + "text": "Posted 30+ Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074029", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074027", + "title": + { + "id": "210074027", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074028", + "widget": "moniker", + "text": "Senior Campaign Consultant", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Hong-Kong/Senior-Campaign-Consultant_58647", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074030", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074031", + "widget": "moniker", + "text": "Hong Kong" + } + ], + + "multiSelect": true + }, + + { + "id": "210074032", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074033", + "widget": "moniker", + "text": "58647" + } + ], + + "multiSelect": true + }, + + { + "id": "210074034", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074035", + "widget": "moniker", + "text": "Posted 30+ Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074038", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074036", + "title": + { + "id": "210074036", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074037", + "widget": "moniker", + "text": "Senior Campaign Consultant", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Shanghai/Senior-Campaign-Consultant_58651", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074039", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074040", + "widget": "moniker", + "text": "Shanghai" + } + ], + + "multiSelect": true + }, + + { + "id": "210074041", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074042", + "widget": "moniker", + "text": "58651" + } + ], + + "multiSelect": true + }, + + { + "id": "210074043", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074044", + "widget": "moniker", + "text": "Posted 30+ Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074047", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074045", + "title": + { + "id": 
"210074045", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074046", + "widget": "moniker", + "text": "Team Lead, Engineering Response Team", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Francisco/Team-Lead--Engineering-Response-Team_58318-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074048", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074049", + "widget": "moniker", + "text": "San Francisco, More..." + } + ], + + "multiSelect": true + }, + + { + "id": "210074050", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074051", + "widget": "moniker", + "text": "58318" + } + ], + + "multiSelect": true + }, + + { + "id": "210074052", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074053", + "widget": "moniker", + "text": "Posted 30+ Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074056", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074054", + "title": + { + "id": "210074054", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074055", + "widget": "moniker", + "text": "Senior Technical Program Manager (TPM)", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Jose/Senior-Technical-Program-Manager--TPM-_60230", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074057", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074058", + "widget": "moniker", + "text": "San Jose, More..." + } + ], + + "multiSelect": true + }, + + { + "id": "210074059", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074060", + "widget": "moniker", + "text": "60230" + } + ], + + "multiSelect": true + }, + + { + "id": "210074061", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074062", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074065", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074063", + "title": + { + "id": "210074063", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074064", + "widget": "moniker", + "text": "Data Scientist, Machine Learning Evaluation", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Francisco/Data-Scientist--Machine-Learning-Evaluation_60531", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074066", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074067", + "widget": "moniker", + "text": "San Francisco, More..." 
+ } + ], + + "multiSelect": true + }, + + { + "id": "210074068", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074069", + "widget": "moniker", + "text": "60531" + } + ], + + "multiSelect": true + }, + + { + "id": "210074070", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074071", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074074", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074072", + "title": + { + "id": "210074072", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074073", + "widget": "moniker", + "text": "Applied Scientist, Machine Learning", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Francisco/Applied-Scientist--Machine-Learning_60235", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074075", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074076", + "widget": "moniker", + "text": "San Francisco, More..." + } + ], + + "multiSelect": true + }, + + { + "id": "210074077", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074078", + "widget": "moniker", + "text": "60235" + } + ], + + "multiSelect": true + }, + + { + "id": "210074079", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074080", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074083", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074081", + "title": + { + "id": "210074081", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074082", + "widget": "moniker", + "text": "Senior Applied Scientist, Machine Learning", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Francisco/Senior-Applied-Scientist--Machine-Learning_60108-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074084", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074085", + "widget": "moniker", + "text": "San Francisco, More..." 
+ } + ], + + "multiSelect": true + }, + + { + "id": "210074086", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074087", + "widget": "moniker", + "text": "60108" + } + ], + + "multiSelect": true + }, + + { + "id": "210074088", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074089", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074092", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074090", + "title": + { + "id": "210074090", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074091", + "widget": "moniker", + "text": "Inside Sales Specialist", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Francisco/Inside-Sales-Specialist_60046", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074093", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074094", + "widget": "moniker", + "text": "San Francisco" + } + ], + + "multiSelect": true + }, + + { + "id": "210074095", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074096", + "widget": "moniker", + "text": "60046" + } + ], + + "multiSelect": true + }, + + { + "id": "210074097", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074098", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074101", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074099", + "title": + { + "id": "210074099", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074100", + "widget": "moniker", + "text": "Photoshop Quality Engineering Developer, C++/iOS", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Jose/Photoshop-Quality-Engineering-Developer--C---iOS_60073", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074102", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074103", + "widget": "moniker", + "text": "San Jose" + } + ], + + "multiSelect": true + }, + + { + "id": "210074104", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074105", + "widget": "moniker", + "text": "60073" + } + ], + + "multiSelect": true + }, + + { + "id": "210074106", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074107", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074110", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074108", + "title": + { + "id": "210074108", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074109", + "widget": "moniker", + "text": "Director, Monetization Strategy for Adobe Experience Cloud", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Jose/Director--Monetization-Strategy-for-Adobe-Experience-Cloud_56441-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": 
"210074111", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074112", + "widget": "moniker", + "text": "San Jose, More..." + } + ], + + "multiSelect": true + }, + + { + "id": "210074113", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074114", + "widget": "moniker", + "text": "56441" + } + ], + + "multiSelect": true + }, + + { + "id": "210074115", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074116", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074119", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074117", + "title": + { + "id": "210074117", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074118", + "widget": "moniker", + "text": "Photoshop Developer, iOS", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Jose/Photoshop-Developer--iOS_60072", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074120", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074121", + "widget": "moniker", + "text": "San Jose" + } + ], + + "multiSelect": true + }, + + { + "id": "210074122", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074123", + "widget": "moniker", + "text": "60072" + } + ], + + "multiSelect": true + }, + + { + "id": "210074124", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074125", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074128", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074126", + "title": + { + "id": "210074126", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074127", + "widget": "moniker", + "text": "Photoshop Senior Developer, C++", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Jose/Photoshop-Senior-Developer--C--_60071", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074129", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074130", + "widget": "moniker", + "text": "San Jose" + } + ], + + "multiSelect": true + }, + + { + "id": "210074131", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074132", + "widget": "moniker", + "text": "60071" + } + ], + + "multiSelect": true + }, + + { + "id": "210074133", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074134", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074137", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074135", + "title": + { + "id": "210074135", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074136", + "widget": "moniker", + "text": "Photoshop Quality Engineer, iOS", + "action": "GET", + "v": true + } + ], + + "commandLink": 
"/en-US/external_experienced/job/San-Jose/Photoshop-Quality-Engineer--iOS_60070", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074138", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074139", + "widget": "moniker", + "text": "San Jose" + } + ], + + "multiSelect": true + }, + + { + "id": "210074140", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074141", + "widget": "moniker", + "text": "60070" + } + ], + + "multiSelect": true + }, + + { + "id": "210074142", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074143", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074146", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074144", + "title": + { + "id": "210074144", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074145", + "widget": "moniker", + "text": "Photoshop Senior iOS Developer", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Jose/Photoshop-Senior-iOS-Developer_60069", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074147", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074148", + "widget": "moniker", + "text": "San Jose" + } + ], + + "multiSelect": true + }, + + { + "id": "210074149", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074150", + "widget": "moniker", + "text": "60069" + } + ], + + "multiSelect": true + }, + + { + "id": "210074151", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074152", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074155", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074153", + "title": + { + "id": "210074153", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074154", + "widget": "moniker", + "text": "Photoshop Automation Engineer", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/New-York/Photoshop-Automation-Engineer_60095", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074156", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074157", + "widget": "moniker", + "text": "New York, More..." 
+ } + ], + + "multiSelect": true + }, + + { + "id": "210074158", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074159", + "widget": "moniker", + "text": "60095" + } + ], + + "multiSelect": true + }, + + { + "id": "210074160", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074161", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074164", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074162", + "title": + { + "id": "210074162", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074163", + "widget": "moniker", + "text": "Data Scientist", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Jose/Data-Scientist_58226", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074165", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074166", + "widget": "moniker", + "text": "San Jose" + } + ], + + "multiSelect": true + }, + + { + "id": "210074167", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074168", + "widget": "moniker", + "text": "58226" + } + ], + + "multiSelect": true + }, + + { + "id": "210074169", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074170", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074173", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074171", + "title": + { + "id": "210074171", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074172", + "widget": "moniker", + "text": "Creative Cloud On-boarding Architect", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Francisco/Creative-Cloud-On-boarding-Architect_60126", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074174", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074175", + "widget": "moniker", + "text": "San Francisco, More..." 
+ } + ], + + "multiSelect": true + }, + + { + "id": "210074176", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074177", + "widget": "moniker", + "text": "60126" + } + ], + + "multiSelect": true + }, + + { + "id": "210074178", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074179", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074182", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074180", + "title": + { + "id": "210074180", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074181", + "widget": "moniker", + "text": "Senior Program Manager, Cross Cloud Customer Experience", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Francisco/Senior-Program-Manager_56094", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074183", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074184", + "widget": "moniker", + "text": "San Francisco" + } + ], + + "multiSelect": true + }, + + { + "id": "210074185", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074186", + "widget": "moniker", + "text": "56094" + } + ], + + "multiSelect": true + }, + + { + "id": "210074187", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074188", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074191", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074189", + "title": + { + "id": "210074189", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074190", + "widget": "moniker", + "text": "Senior Data Scientist", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Emeryville/Senior-Data-Scientist_56559-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074192", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074193", + "widget": "moniker", + "text": "Emeryville" + } + ], + + "multiSelect": true + }, + + { + "id": "210074194", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074195", + "widget": "moniker", + "text": "56559" + } + ], + + "multiSelect": true + }, + + { + "id": "210074196", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074197", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074200", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074198", + "title": + { + "id": "210074198", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074199", + "widget": "moniker", + "text": "Sr Experience Designer, AR/VR", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Seattle/Sr-Experience-Designer--AR-VR_59400", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074201", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + 
"instances": + [ + { + "id": "210074202", + "widget": "moniker", + "text": "Seattle" + } + ], + + "multiSelect": true + }, + + { + "id": "210074203", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074204", + "widget": "moniker", + "text": "59400" + } + ], + + "multiSelect": true + }, + + { + "id": "210074205", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074206", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074209", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074207", + "title": + { + "id": "210074207", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074208", + "widget": "moniker", + "text": "Sr. Site Reliability Engineer (Postgres)", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Boston/Sr-Site-Reliability-Engineer_58560-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074210", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074211", + "widget": "moniker", + "text": "Boston" + } + ], + + "multiSelect": true + }, + + { + "id": "210074212", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074213", + "widget": "moniker", + "text": "58560" + } + ], + + "multiSelect": true + }, + + { + "id": "210074214", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074215", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074218", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074216", + "title": + { + "id": "210074216", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074217", + "widget": "moniker", + "text": "Data Analytics Manager, Go-to-Market", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Jose/Customer-Acquisition-Data-Engineer--Digital-Media-Go-to-Market_52373-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074219", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074220", + "widget": "moniker", + "text": "San Jose" + } + ], + + "multiSelect": true + }, + + { + "id": "210074221", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074222", + "widget": "moniker", + "text": "52373" + } + ], + + "multiSelect": true + }, + + { + "id": "210074223", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074224", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074227", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074225", + "title": + { + "id": "210074225", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074226", + "widget": "moniker", + "text": "Campaign Technical Support Consultant", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Jose/Campaign-Technical-Support-Consultant_59861", + "multiSelect": 
true + }, + + "subtitles": + [ + { + "id": "210074228", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074229", + "widget": "moniker", + "text": "San Jose" + } + ], + + "multiSelect": true + }, + + { + "id": "210074230", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074231", + "widget": "moniker", + "text": "59861" + } + ], + + "multiSelect": true + }, + + { + "id": "210074232", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074233", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074236", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074234", + "title": + { + "id": "210074234", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074235", + "widget": "moniker", + "text": "Sr. Program Manager, Business Operations - XDC", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Francisco/Program-Manager--Adobe-Experience-Design--XD--Team_55098", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074237", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074238", + "widget": "moniker", + "text": "San Francisco" + } + ], + + "multiSelect": true + }, + + { + "id": "210074239", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074240", + "widget": "moniker", + "text": "55098" + } + ], + + "multiSelect": true + }, + + { + "id": "210074241", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074242", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074245", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074243", + "title": + { + "id": "210074243", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074244", + "widget": "moniker", + "text": "Senior Security Engineer", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Lehi/Senior-Security-Engineer_59815-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074246", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074247", + "widget": "moniker", + "text": "Lehi" + } + ], + + "multiSelect": true + }, + + { + "id": "210074248", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074249", + "widget": "moniker", + "text": "59815" + } + ], + + "multiSelect": true + }, + + { + "id": "210074250", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074251", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074254", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074252", + "title": + { + "id": "210074252", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074253", + "widget": "moniker", + "text": "Software Engineer", + "action": "GET", + "v": true + } + ], + + "commandLink": 
"/en-US/external_experienced/job/Lehi/Software-Engineer_59879-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074255", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074256", + "widget": "moniker", + "text": "Lehi" + } + ], + + "multiSelect": true + }, + + { + "id": "210074257", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074258", + "widget": "moniker", + "text": "59879" + } + ], + + "multiSelect": true + }, + + { + "id": "210074259", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074260", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074263", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074261", + "title": + { + "id": "210074261", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074262", + "widget": "moniker", + "text": "Role: Sr. Enterprise Account Executive, Creative Cloud – Media & Entertainment", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Francisco/Named-Account-Manager--Media---Entertainment_58487-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074264", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074265", + "widget": "moniker", + "text": "San Francisco" + } + ], + + "multiSelect": true + }, + + { + "id": "210074266", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074267", + "widget": "moniker", + "text": "58487" + } + ], + + "multiSelect": true + }, + + { + "id": "210074268", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074269", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074272", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074270", + "title": + { + "id": "210074270", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074271", + "widget": "moniker", + "text": "Senior Customer Success Manager - Healthcare", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/New-York/Senior-Customer-Success-Manager_57803-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074273", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074274", + "widget": "moniker", + "text": "New York" + } + ], + + "multiSelect": true + }, + + { + "id": "210074275", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074276", + "widget": "moniker", + "text": "57803" + } + ], + + "multiSelect": true + }, + + { + "id": "210074277", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074278", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074281", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074279", + "title": + { + "id": "210074279", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + 
"id": "210074280", + "widget": "moniker", + "text": "CloudOps DBA", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Noida/CloudOps-DBA_60207", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074282", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074283", + "widget": "moniker", + "text": "Noida" + } + ], + + "multiSelect": true + }, + + { + "id": "210074284", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074285", + "widget": "moniker", + "text": "60207" + } + ], + + "multiSelect": true + }, + + { + "id": "210074286", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074287", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074290", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074288", + "title": + { + "id": "210074288", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074289", + "widget": "moniker", + "text": "Senior Customer Success Manager - Financial Services & Insurance", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/New-York/Senior-Customer-Success-Manager---Financial-Services---Insurance_59615-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074291", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074292", + "widget": "moniker", + "text": "New York" + } + ], + + "multiSelect": true + }, + + { + "id": "210074293", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074294", + "widget": "moniker", + "text": "59615" + } + ], + + "multiSelect": true + }, + + { + "id": "210074295", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074296", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074299", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074297", + "title": + { + "id": "210074297", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074298", + "widget": "moniker", + "text": "Senior Customer Success Manager", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Stockholm/Senior-Customer-Success-Manager_60506", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074300", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074301", + "widget": "moniker", + "text": "Stockholm" + } + ], + + "multiSelect": true + }, + + { + "id": "210074302", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074303", + "widget": "moniker", + "text": "60506" + } + ], + + "multiSelect": true + }, + + { + "id": "210074304", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074305", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074308", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074306", + "title": + { + "id": "210074306", + "widget": 
"monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074307", + "widget": "moniker", + "text": "Senior Consultant, AEM", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/London/Senior-Consultant--AEM_60509-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074309", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074310", + "widget": "moniker", + "text": "London" + } + ], + + "multiSelect": true + }, + + { + "id": "210074311", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074312", + "widget": "moniker", + "text": "60509" + } + ], + + "multiSelect": true + }, + + { + "id": "210074313", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074314", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074317", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074315", + "title": + { + "id": "210074315", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074316", + "widget": "moniker", + "text": "Marketing Campaign Intern (12 month placement year)", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Maidenhead/Marketing-Campaign-Intern--12-month-placement-year-_60094", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074318", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074319", + "widget": "moniker", + "text": "Maidenhead" + } + ], + + "multiSelect": true + }, + + { + "id": "210074320", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074321", + "widget": "moniker", + "text": "60094" + } + ], + + "multiSelect": true + }, + + { + "id": "210074322", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074323", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074326", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074324", + "title": + { + "id": "210074324", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074325", + "widget": "moniker", + "text": "Photoshop Quality Engineer, iOS", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Jose/Photoshop-Quality-Engineer--iOS_60074", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074327", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074328", + "widget": "moniker", + "text": "San Jose" + } + ], + + "multiSelect": true + }, + + { + "id": "210074329", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074330", + "widget": "moniker", + "text": "60074" + } + ], + + "multiSelect": true + }, + + { + "id": "210074331", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074332", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074335", + "widget": 
"templatedListItem", + "ecid": "templatedListItem.210074333", + "title": + { + "id": "210074333", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074334", + "widget": "moniker", + "text": "Photoshop Development Manager, Android", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/New-York/Photoshop-Development-Manager--Android_60076", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074336", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074337", + "widget": "moniker", + "text": "New York" + } + ], + + "multiSelect": true + }, + + { + "id": "210074338", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074339", + "widget": "moniker", + "text": "60076" + } + ], + + "multiSelect": true + }, + + { + "id": "210074340", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074341", + "widget": "moniker", + "text": "Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074344", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074342", + "title": + { + "id": "210074342", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074343", + "widget": "moniker", + "text": "Photoshop Quality Engineering Manager", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Jose/Photoshop-Quality-Engineering-Manager_60075", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074345", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074346", + "widget": "moniker", + "text": "San Jose" + } + ], + + "multiSelect": true + }, + + { + "id": "210074347", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074348", + "widget": "moniker", + "text": "60075" + } + ], + + "multiSelect": true + }, + + { + "id": "210074349", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074350", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074353", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074351", + "title": + { + "id": "210074351", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074352", + "widget": "moniker", + "text": "Customer Success Manager - DMA", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Noida/Customer-Success-Manager---DMA_58763", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074354", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074355", + "widget": "moniker", + "text": "Noida" + } + ], + + "multiSelect": true + }, + + { + "id": "210074356", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074357", + "widget": "moniker", + "text": "58763" + } + ], + + "multiSelect": true + }, + + { + "id": "210074358", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074359", + "widget": "moniker", + "text": 
"Posted Today" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074362", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074360", + "title": + { + "id": "210074360", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074361", + "widget": "moniker", + "text": "Senior Campaign Journey Manager, Document Cloud", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Francisco/Senior-Campaign-Journey-Manager--Document-Cloud_59939", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074363", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074364", + "widget": "moniker", + "text": "San Francisco" + } + ], + + "multiSelect": true + }, + + { + "id": "210074365", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074366", + "widget": "moniker", + "text": "59939" + } + ], + + "multiSelect": true + }, + + { + "id": "210074367", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074368", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074371", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074369", + "title": + { + "id": "210074369", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074370", + "widget": "moniker", + "text": "Software Architect, Data Systems", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Emeryville/Software-Architect--Data-Systems_59460", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074372", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074373", + "widget": "moniker", + "text": "Emeryville" + } + ], + + "multiSelect": true + }, + + { + "id": "210074374", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074375", + "widget": "moniker", + "text": "59460" + } + ], + + "multiSelect": true + }, + + { + "id": "210074376", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074377", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074380", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074378", + "title": + { + "id": "210074378", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074379", + "widget": "moniker", + "text": "Lead Software Engineer", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Emeryville/Lead-Software-Engineer_52914", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074381", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074382", + "widget": "moniker", + "text": "Emeryville" + } + ], + + "multiSelect": true + }, + + { + "id": "210074383", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074384", + "widget": "moniker", + "text": "52914" + } + ], + + "multiSelect": true + }, + + { + "id": "210074385", + "widget": 
"monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074386", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074389", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074387", + "title": + { + "id": "210074387", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074388", + "widget": "moniker", + "text": "Sr. Software Architect", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Jose/Sr-Software-Architect_59788", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074390", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074391", + "widget": "moniker", + "text": "San Jose" + } + ], + + "multiSelect": true + }, + + { + "id": "210074392", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074393", + "widget": "moniker", + "text": "59788" + } + ], + + "multiSelect": true + }, + + { + "id": "210074394", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074395", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074398", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074396", + "title": + { + "id": "210074396", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074397", + "widget": "moniker", + "text": "Senior Program Manager", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Jose/Senior-Program-Manager_60008-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074399", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074400", + "widget": "moniker", + "text": "San Jose" + } + ], + + "multiSelect": true + }, + + { + "id": "210074401", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074402", + "widget": "moniker", + "text": "60008" + } + ], + + "multiSelect": true + }, + + { + "id": "210074403", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074404", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074407", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074405", + "title": + { + "id": "210074405", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074406", + "widget": "moniker", + "text": "Sr. 
Solution Architect", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Jose/Sr-Solution-Architect_59789-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074408", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074409", + "widget": "moniker", + "text": "San Jose" + } + ], + + "multiSelect": true + }, + + { + "id": "210074410", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074411", + "widget": "moniker", + "text": "59789" + } + ], + + "multiSelect": true + }, + + { + "id": "210074412", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074413", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074416", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074414", + "title": + { + "id": "210074414", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074415", + "widget": "moniker", + "text": "Sr. Product Specialist - Campaign - Retail", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Remote-California/Sr-Product-Specialist---Campaign----Retail_60042", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074417", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074418", + "widget": "moniker", + "text": "Remote California, More..." + } + ], + + "multiSelect": true + }, + + { + "id": "210074419", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074420", + "widget": "moniker", + "text": "60042" + } + ], + + "multiSelect": true + }, + + { + "id": "210074421", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074422", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074425", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074423", + "title": + { + "id": "210074423", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074424", + "widget": "moniker", + "text": "Werkstudent Account Development Management", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Munich/Werkstudent-Account-Development-Management_59551", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074426", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074427", + "widget": "moniker", + "text": "Munich" + } + ], + + "multiSelect": true + }, + + { + "id": "210074428", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074429", + "widget": "moniker", + "text": "59551" + } + ], + + "multiSelect": true + }, + + { + "id": "210074430", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074431", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074434", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074432", + "title": + { + "id": "210074432", + "widget": "monikerList", + "ecid": 
"monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074433", + "widget": "moniker", + "text": "Digital Transformation Consultant", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/London/Digital-Transformation-Consultant_59314", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074435", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074436", + "widget": "moniker", + "text": "London, More..." + } + ], + + "multiSelect": true + }, + + { + "id": "210074437", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074438", + "widget": "moniker", + "text": "59314" + } + ], + + "multiSelect": true + }, + + { + "id": "210074439", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074440", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074443", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074441", + "title": + { + "id": "210074441", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074442", + "widget": "moniker", + "text": "Advertising Cloud Marketing Manager", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/London/Advertising-Cloud-Marketing-Manager_60213", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074444", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074445", + "widget": "moniker", + "text": "London" + } + ], + + "multiSelect": true + }, + + { + "id": "210074446", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074447", + "widget": "moniker", + "text": "60213" + } + ], + + "multiSelect": true + }, + + { + "id": "210074448", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074449", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074452", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074450", + "title": + { + "id": "210074450", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074451", + "widget": "moniker", + "text": "Associate Legal Specialist, Global Legal Services", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Maidenhead/Associate-Legal-Specialist--Global-Legal-Services_57174-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074453", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074454", + "widget": "moniker", + "text": "Maidenhead" + } + ], + + "multiSelect": true + }, + + { + "id": "210074455", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074456", + "widget": "moniker", + "text": "57174" + } + ], + + "multiSelect": true + }, + + { + "id": "210074457", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074458", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074461", + 
"widget": "templatedListItem", + "ecid": "templatedListItem.210074459", + "title": + { + "id": "210074459", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074460", + "widget": "moniker", + "text": "Sr. Solutions Consultant", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/New-York/Sr-Solutions-Consultant_60057-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074462", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074463", + "widget": "moniker", + "text": "New York, More..." + } + ], + + "multiSelect": true + }, + + { + "id": "210074464", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074465", + "widget": "moniker", + "text": "60057" + } + ], + + "multiSelect": true + }, + + { + "id": "210074466", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074467", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074470", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074468", + "title": + { + "id": "210074468", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074469", + "widget": "moniker", + "text": "Sr. Computer Scientist", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Francisco/Developer---Typekit_53125", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074471", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074472", + "widget": "moniker", + "text": "San Francisco" + } + ], + + "multiSelect": true + }, + + { + "id": "210074473", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074474", + "widget": "moniker", + "text": "53125" + } + ], + + "multiSelect": true + }, + + { + "id": "210074475", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074476", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074479", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074477", + "title": + { + "id": "210074477", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074478", + "widget": "moniker", + "text": "Project Manager", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Remote-Hamburg/Project-Manager_60162-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074480", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074481", + "widget": "moniker", + "text": "Remote Hamburg" + } + ], + + "multiSelect": true + }, + + { + "id": "210074482", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074483", + "widget": "moniker", + "text": "60162" + } + ], + + "multiSelect": true + }, + + { + "id": "210074484", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074485", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + 
"multiSelect": true + } + ] + }, + + { + "id": "210074488", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074486", + "title": + { + "id": "210074486", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074487", + "widget": "moniker", + "text": "Senior Account Manager, Professional Service Sales", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Stockholm/Senior-Account-Manager--Professional-Service-Sales_59545", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074489", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074490", + "widget": "moniker", + "text": "Stockholm" + } + ], + + "multiSelect": true + }, + + { + "id": "210074491", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074492", + "widget": "moniker", + "text": "59545" + } + ], + + "multiSelect": true + }, + + { + "id": "210074493", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074494", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074497", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074495", + "title": + { + "id": "210074495", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074496", + "widget": "moniker", + "text": "Practice Director, Media Entertainment & Telecom, Digital Strategy Group", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/New-York/Lead-Digital-Strategist_54232-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074498", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074499", + "widget": "moniker", + "text": "New York, More..." 
+ } + ], + + "multiSelect": true + }, + + { + "id": "210074500", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074501", + "widget": "moniker", + "text": "54232" + } + ], + + "multiSelect": true + }, + + { + "id": "210074502", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074503", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074506", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074504", + "title": + { + "id": "210074504", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074505", + "widget": "moniker", + "text": "Account Manager", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/London/Account-Manager_54970", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074507", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074508", + "widget": "moniker", + "text": "London" + } + ], + + "multiSelect": true + }, + + { + "id": "210074509", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074510", + "widget": "moniker", + "text": "54970" + } + ], + + "multiSelect": true + }, + + { + "id": "210074511", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074512", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074515", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074513", + "title": + { + "id": "210074513", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074514", + "widget": "moniker", + "text": "Global Practice Manager, Digital Strategy Group", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Francisco/Practice-Manager--Digital-Strategy-Group_53669-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074516", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074517", + "widget": "moniker", + "text": "San Francisco, More..." 
+ } + ], + + "multiSelect": true + }, + + { + "id": "210074518", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074519", + "widget": "moniker", + "text": "53669" + } + ], + + "multiSelect": true + }, + + { + "id": "210074520", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074521", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074524", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074522", + "title": + { + "id": "210074522", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074523", + "widget": "moniker", + "text": "Senior Consultant - Analytics, Target & Audience Manager", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/London/Senior-Consultant----Analytics--Target---Audience-Manager_59053", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074525", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074526", + "widget": "moniker", + "text": "London" + } + ], + + "multiSelect": true + }, + + { + "id": "210074527", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074528", + "widget": "moniker", + "text": "59053" + } + ], + + "multiSelect": true + }, + + { + "id": "210074529", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074530", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074533", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074531", + "title": + { + "id": "210074531", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074532", + "widget": "moniker", + "text": "Renewals and Retention Manager", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Singapore/Renewals-and-Retention-Manager_59908-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074534", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074535", + "widget": "moniker", + "text": "Singapore" + } + ], + + "multiSelect": true + }, + + { + "id": "210074536", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074537", + "widget": "moniker", + "text": "59908" + } + ], + + "multiSelect": true + }, + + { + "id": "210074538", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074539", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074542", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074540", + "title": + { + "id": "210074540", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074541", + "widget": "moniker", + "text": "Deliverability Consultant", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Paris/Deliverability-Consultant_58866-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074543", + "widget": "monikerList", + 
"ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074544", + "widget": "moniker", + "text": "Paris" + } + ], + + "multiSelect": true + }, + + { + "id": "210074545", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074546", + "widget": "moniker", + "text": "58866" + } + ], + + "multiSelect": true + }, + + { + "id": "210074547", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074548", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074551", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074549", + "title": + { + "id": "210074549", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074550", + "widget": "moniker", + "text": "Technical Support", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Bucharest/Sr-Computer-Scientist-1_57647", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074552", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074553", + "widget": "moniker", + "text": "Bucharest" + } + ], + + "multiSelect": true + }, + + { + "id": "210074554", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074555", + "widget": "moniker", + "text": "57647" + } + ], + + "multiSelect": true + }, + + { + "id": "210074556", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074557", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074560", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074558", + "title": + { + "id": "210074558", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074559", + "widget": "moniker", + "text": "Customer Support Representative (6 month FTC)", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Berlin/Customer-Support-Representative--6-month-FTC-_59992-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074561", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074562", + "widget": "moniker", + "text": "Berlin" + } + ], + + "multiSelect": true + }, + + { + "id": "210074563", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074564", + "widget": "moniker", + "text": "59992" + } + ], + + "multiSelect": true + }, + + { + "id": "210074565", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074566", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074569", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074567", + "title": + { + "id": "210074567", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074568", + "widget": "moniker", + "text": "Customer Support Representative (6 month FTC)", + "action": "GET", + "v": true + } + ], + + "commandLink": 
"/en-US/external_experienced/job/Berlin/Customer-Support-Representative--6-month-FTC-_59991-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074570", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074571", + "widget": "moniker", + "text": "Berlin" + } + ], + + "multiSelect": true + }, + + { + "id": "210074572", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074573", + "widget": "moniker", + "text": "59991" + } + ], + + "multiSelect": true + }, + + { + "id": "210074574", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074575", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074578", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074576", + "title": + { + "id": "210074576", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074577", + "widget": "moniker", + "text": "Member of Technical Staff", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Bangalore/Natural-language-processing--NLP----Computer-Scientist_52344", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074579", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074580", + "widget": "moniker", + "text": "Bangalore" + } + ], + + "multiSelect": true + }, + + { + "id": "210074581", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074582", + "widget": "moniker", + "text": "52344" + } + ], + + "multiSelect": true + }, + + { + "id": "210074583", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074584", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074587", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074585", + "title": + { + "id": "210074585", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074586", + "widget": "moniker", + "text": "Senior Manager, Web Optimization & Analytics", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Tokyo/Senior-Manager--Web-Optimization---Analytics_60022-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074588", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074589", + "widget": "moniker", + "text": "Tokyo" + } + ], + + "multiSelect": true + }, + + { + "id": "210074590", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074591", + "widget": "moniker", + "text": "60022" + } + ], + + "multiSelect": true + }, + + { + "id": "210074592", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074593", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074596", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074594", + "title": + { + "id": "210074594", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": 
"210074595", + "widget": "moniker", + "text": "Public Sector Sales Operations Manager", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Sydney/Public-Sector-Sales-Operations-Manager_60170-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074597", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074598", + "widget": "moniker", + "text": "Sydney" + } + ], + + "multiSelect": true + }, + + { + "id": "210074599", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074600", + "widget": "moniker", + "text": "60170" + } + ], + + "multiSelect": true + }, + + { + "id": "210074601", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074602", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074605", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074603", + "title": + { + "id": "210074603", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074604", + "widget": "moniker", + "text": "Software QE Developer", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Jose/Software-QE-Developer_59820-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074606", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074607", + "widget": "moniker", + "text": "San Jose" + } + ], + + "multiSelect": true + }, + + { + "id": "210074608", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074609", + "widget": "moniker", + "text": "59820" + } + ], + + "multiSelect": true + }, + + { + "id": "210074610", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074611", + "widget": "moniker", + "text": "Posted 2 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074614", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074612", + "title": + { + "id": "210074612", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074613", + "widget": "moniker", + "text": "Sr Computer Scientist - Augmented Reality", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Jose/Sr-Computer-Scientist---Augmented-Reality_57899", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074615", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074616", + "widget": "moniker", + "text": "San Jose, More..." 
+ } + ], + + "multiSelect": true + }, + + { + "id": "210074617", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074618", + "widget": "moniker", + "text": "57899" + } + ], + + "multiSelect": true + }, + + { + "id": "210074619", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074620", + "widget": "moniker", + "text": "Posted 2 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074623", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074621", + "title": + { + "id": "210074621", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074622", + "widget": "moniker", + "text": "Senior Consultant", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Remote-Australia/Senior-Consultant_57314", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074624", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074625", + "widget": "moniker", + "text": "Remote Australia" + } + ], + + "multiSelect": true + }, + + { + "id": "210074626", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074627", + "widget": "moniker", + "text": "57314" + } + ], + + "multiSelect": true + }, + + { + "id": "210074628", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074629", + "widget": "moniker", + "text": "Posted Yesterday" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074632", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074630", + "title": + { + "id": "210074630", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074631", + "widget": "moniker", + "text": "Software Developer, Creative Mobile Drawing", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Jose/Software-Developer--Creative-Mobile-Drawing_60018", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074633", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074634", + "widget": "moniker", + "text": "San Jose, More..." 
+ } + ], + + "multiSelect": true + }, + + { + "id": "210074635", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074636", + "widget": "moniker", + "text": "60018" + } + ], + + "multiSelect": true + }, + + { + "id": "210074637", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074638", + "widget": "moniker", + "text": "Posted 2 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074641", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074639", + "title": + { + "id": "210074639", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074640", + "widget": "moniker", + "text": "Self Service Excellence Computer Scientist", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Jose/Self-Service-Excellence-Computer-Scientist_60050-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074642", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074643", + "widget": "moniker", + "text": "San Jose" + } + ], + + "multiSelect": true + }, + + { + "id": "210074644", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074645", + "widget": "moniker", + "text": "60050" + } + ], + + "multiSelect": true + }, + + { + "id": "210074646", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074647", + "widget": "moniker", + "text": "Posted 2 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074650", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074648", + "title": + { + "id": "210074648", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074649", + "widget": "moniker", + "text": "Account executive, inside sales, Adobe Sign", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Jose/Account-executive--inside-sales--Adobe-Sign_57861", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074651", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074652", + "widget": "moniker", + "text": "San Jose" + } + ], + + "multiSelect": true + }, + + { + "id": "210074653", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074654", + "widget": "moniker", + "text": "57861" + } + ], + + "multiSelect": true + }, + + { + "id": "210074655", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074656", + "widget": "moniker", + "text": "Posted 2 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074659", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074657", + "title": + { + "id": "210074657", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074658", + "widget": "moniker", + "text": "Senior Product Manager – Adobe.com and Marketing Technology", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Jose/Senior-Product-Manager---Content-and-Asset-Management_57039", + "multiSelect": true + }, + + 
"subtitles": + [ + { + "id": "210074660", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074661", + "widget": "moniker", + "text": "San Jose" + } + ], + + "multiSelect": true + }, + + { + "id": "210074662", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074663", + "widget": "moniker", + "text": "57039" + } + ], + + "multiSelect": true + }, + + { + "id": "210074664", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074665", + "widget": "moniker", + "text": "Posted 2 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074668", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074666", + "title": + { + "id": "210074666", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074667", + "widget": "moniker", + "text": "Demo Program Manager", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Remote-Michigan/Demo-Program-Manager_59129-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074669", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074670", + "widget": "moniker", + "text": "Remote Michigan" + } + ], + + "multiSelect": true + }, + + { + "id": "210074671", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074672", + "widget": "moniker", + "text": "59129" + } + ], + + "multiSelect": true + }, + + { + "id": "210074673", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074674", + "widget": "moniker", + "text": "Posted 2 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074677", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074675", + "title": + { + "id": "210074675", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074676", + "widget": "moniker", + "text": "Software Engineer", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Emeryville/Software-Engineer_59461", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074678", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074679", + "widget": "moniker", + "text": "Emeryville" + } + ], + + "multiSelect": true + }, + + { + "id": "210074680", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074681", + "widget": "moniker", + "text": "59461" + } + ], + + "multiSelect": true + }, + + { + "id": "210074682", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074683", + "widget": "moniker", + "text": "Posted 2 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074686", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074684", + "title": + { + "id": "210074684", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074685", + "widget": "moniker", + "text": "Inside Sales Specialist", + "action": "GET", + "v": true + } + ], + + "commandLink": 
"/en-US/external_experienced/job/San-Francisco/Inside-Sales-Specialist_60125", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074687", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074688", + "widget": "moniker", + "text": "San Francisco" + } + ], + + "multiSelect": true + }, + + { + "id": "210074689", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074690", + "widget": "moniker", + "text": "60125" + } + ], + + "multiSelect": true + }, + + { + "id": "210074691", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074692", + "widget": "moniker", + "text": "Posted 2 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074695", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074693", + "title": + { + "id": "210074693", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074694", + "widget": "moniker", + "text": "Senior Software Engineer - Core Services (UI/Front End)", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Jose/Software-Engineer---Core-Services--UI-Front-End-_55833", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074696", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074697", + "widget": "moniker", + "text": "San Jose" + } + ], + + "multiSelect": true + }, + + { + "id": "210074698", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074699", + "widget": "moniker", + "text": "55833" + } + ], + + "multiSelect": true + }, + + { + "id": "210074700", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074701", + "widget": "moniker", + "text": "Posted 2 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074704", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074702", + "title": + { + "id": "210074702", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074703", + "widget": "moniker", + "text": "Software Engineer", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Jose/Software-Engineer_55899", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074705", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074706", + "widget": "moniker", + "text": "San Jose" + } + ], + + "multiSelect": true + }, + + { + "id": "210074707", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074708", + "widget": "moniker", + "text": "55899" + } + ], + + "multiSelect": true + }, + + { + "id": "210074709", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074710", + "widget": "moniker", + "text": "Posted 2 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074713", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074711", + "title": + { + "id": "210074711", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074712", + "widget": 
"moniker", + "text": "Photoshop Quality Engineer", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/New-York/Photoshop-Quality-Engineer_60092", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074714", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074715", + "widget": "moniker", + "text": "New York, More..." + } + ], + + "multiSelect": true + }, + + { + "id": "210074716", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074717", + "widget": "moniker", + "text": "60092" + } + ], + + "multiSelect": true + }, + + { + "id": "210074718", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074719", + "widget": "moniker", + "text": "Posted 2 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074722", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074720", + "title": + { + "id": "210074720", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074721", + "widget": "moniker", + "text": "Photoshop Android Engineer", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/New-York/Photoshop-Android-Engineer_60091", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074723", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074724", + "widget": "moniker", + "text": "New York, More..." + } + ], + + "multiSelect": true + }, + + { + "id": "210074725", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074726", + "widget": "moniker", + "text": "60091" + } + ], + + "multiSelect": true + }, + + { + "id": "210074727", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074728", + "widget": "moniker", + "text": "Posted 2 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074731", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074729", + "title": + { + "id": "210074729", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074730", + "widget": "moniker", + "text": "Core Services Intern", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Jose/Core-Services-Intern_56995-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074732", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074733", + "widget": "moniker", + "text": "San Jose" + } + ], + + "multiSelect": true + }, + + { + "id": "210074734", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074735", + "widget": "moniker", + "text": "56995" + } + ], + + "multiSelect": true + }, + + { + "id": "210074736", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074737", + "widget": "moniker", + "text": "Posted 2 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074740", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074738", + "title": + { + "id": "210074738", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + 
"singular": true, + "instances": + [ + { + "id": "210074739", + "widget": "moniker", + "text": "Senior Director, Sales Operations, EMEA", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/London/Senior-Director--Sales-Operations--EMEA_59917", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074741", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074742", + "widget": "moniker", + "text": "London, More..." + } + ], + + "multiSelect": true + }, + + { + "id": "210074743", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074744", + "widget": "moniker", + "text": "59917" + } + ], + + "multiSelect": true + }, + + { + "id": "210074745", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074746", + "widget": "moniker", + "text": "Posted 2 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074749", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074747", + "title": + { + "id": "210074747", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074748", + "widget": "moniker", + "text": "Senior Administrative Assistant", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Maidenhead/Senior-Administrative-Assistant_60163", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074750", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074751", + "widget": "moniker", + "text": "Maidenhead" + } + ], + + "multiSelect": true + }, + + { + "id": "210074752", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074753", + "widget": "moniker", + "text": "60163" + } + ], + + "multiSelect": true + }, + + { + "id": "210074754", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074755", + "widget": "moniker", + "text": "Posted 2 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074758", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074756", + "title": + { + "id": "210074756", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074757", + "widget": "moniker", + "text": "Software Development Engineer, Test Engineering", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Emeryville/Senior-Software-Engineer--Test-Engineering_60006-1", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074759", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074760", + "widget": "moniker", + "text": "Emeryville" + } + ], + + "multiSelect": true + }, + + { + "id": "210074761", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074762", + "widget": "moniker", + "text": "60006" + } + ], + + "multiSelect": true + }, + + { + "id": "210074763", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074764", + "widget": "moniker", + "text": "Posted 2 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074767", + "widget": "templatedListItem", + 
"ecid": "templatedListItem.210074765", + "title": + { + "id": "210074765", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074766", + "widget": "moniker", + "text": "MongoDB Database Developer", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/San-Jose/MongoDB-Database-Administrator--DBA-_57056", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074768", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074769", + "widget": "moniker", + "text": "San Jose" + } + ], + + "multiSelect": true + }, + + { + "id": "210074770", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074771", + "widget": "moniker", + "text": "57056" + } + ], + + "multiSelect": true + }, + + { + "id": "210074772", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074773", + "widget": "moniker", + "text": "Posted 2 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074776", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074774", + "title": + { + "id": "210074774", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074775", + "widget": "moniker", + "text": "Digital Strategist", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Bangalore/Digital-Strategist_59029", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074777", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074778", + "widget": "moniker", + "text": "Bangalore" + } + ], + + "multiSelect": true + }, + + { + "id": "210074779", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074780", + "widget": "moniker", + "text": "59029" + } + ], + + "multiSelect": true + }, + + { + "id": "210074781", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074782", + "widget": "moniker", + "text": "Posted 2 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074785", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074783", + "title": + { + "id": "210074783", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074784", + "widget": "moniker", + "text": "Software Quality Engineer", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Bucharest/Software-Quality-Engineer_57636", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074786", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074787", + "widget": "moniker", + "text": "Bucharest" + } + ], + + "multiSelect": true + }, + + { + "id": "210074788", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074789", + "widget": "moniker", + "text": "57636" + } + ], + + "multiSelect": true + }, + + { + "id": "210074790", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074791", + "widget": "moniker", + "text": "Posted 2 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { 
+ "id": "210074794", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074792", + "title": + { + "id": "210074792", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074793", + "widget": "moniker", + "text": "Service Manager", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Bucharest/Service-Manager_59856", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074795", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074796", + "widget": "moniker", + "text": "Bucharest" + } + ], + + "multiSelect": true + }, + + { + "id": "210074797", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074798", + "widget": "moniker", + "text": "59856" + } + ], + + "multiSelect": true + }, + + { + "id": "210074799", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074800", + "widget": "moniker", + "text": "Posted 2 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074803", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074801", + "title": + { + "id": "210074801", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074802", + "widget": "moniker", + "text": "HR Compliance", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Maidenhead/HR-Compliance_59975", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074804", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074805", + "widget": "moniker", + "text": "Maidenhead" + } + ], + + "multiSelect": true + }, + + { + "id": "210074806", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074807", + "widget": "moniker", + "text": "59975" + } + ], + + "multiSelect": true + }, + + { + "id": "210074808", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074809", + "widget": "moniker", + "text": "Posted 2 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074812", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074810", + "title": + { + "id": "210074810", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074811", + "widget": "moniker", + "text": "Benefits Specialist", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Maidenhead/Benefits-Specialist_57577", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074813", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074814", + "widget": "moniker", + "text": "Maidenhead" + } + ], + + "multiSelect": true + }, + + { + "id": "210074815", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074816", + "widget": "moniker", + "text": "57577" + } + ], + + "multiSelect": true + }, + + { + "id": "210074817", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074818", + "widget": "moniker", + "text": "Posted 2 Days Ago" + } + ], + + "multiSelect": true + } + ] + 
}, + + { + "id": "210074821", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074819", + "title": + { + "id": "210074819", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074820", + "widget": "moniker", + "text": "Technical/Engineering Lead - Big Data (Hadoop)", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Noida/Technical-Engineering-Lead---Big-Data--Hadoop-_60133", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074822", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074823", + "widget": "moniker", + "text": "Noida" + } + ], + + "multiSelect": true + }, + + { + "id": "210074824", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074825", + "widget": "moniker", + "text": "60133" + } + ], + + "multiSelect": true + }, + + { + "id": "210074826", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074827", + "widget": "moniker", + "text": "Posted 2 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074830", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074828", + "title": + { + "id": "210074828", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074829", + "widget": "moniker", + "text": "Sr Hadoop Developer", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Noida/Sr-Hadoop-Developer_59914", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074831", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074832", + "widget": "moniker", + "text": "Noida" + } + ], + + "multiSelect": true + }, + + { + "id": "210074833", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074834", + "widget": "moniker", + "text": "59914" + } + ], + + "multiSelect": true + }, + + { + "id": "210074835", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074836", + "widget": "moniker", + "text": "Posted 2 Days Ago" + } + ], + + "multiSelect": true + } + ] + }, + + { + "id": "210074839", + "widget": "templatedListItem", + "ecid": "templatedListItem.210074837", + "title": + { + "id": "210074837", + "widget": "monikerList", + "ecid": "monikerList", + "propertyName": "None", + "singular": true, + "instances": + [ + { + "id": "210074838", + "widget": "moniker", + "text": "Sr Hadoop Developer", + "action": "GET", + "v": true + } + ], + + "commandLink": "/en-US/external_experienced/job/Bangalore/Sr-Hadoop-Developer_60134", + "multiSelect": true + }, + + "subtitles": + [ + { + "id": "210074840", + "widget": "monikerList", + "ecid": "monikerList.job.locationsText", + "instances": + [ + { + "id": "210074841", + "widget": "moniker", + "text": "Bangalore" + } + ], + + "multiSelect": true + }, + + { + "id": "210074842", + "widget": "monikerList", + "ecid": "monikerList.job.jobRequisitionId", + "instances": + [ + { + "id": "210074843", + "widget": "moniker", + "text": "60134" + } + ], + + "multiSelect": true + }, + + { + "id": "210074844", + "widget": "monikerList", + "ecid": "monikerList.job.postedOn", + "instances": + [ + { + "id": "210074845", + "widget": "moniker", + "text": 
"Posted 2 Days Ago" + } + ], + + "multiSelect": true + } + ] + } + ] + } + ], + + "endPoints": + [ + { + "type": "Search", + "uri": "/external_experienced/fs/search/318c8bb6f553100021d223d9780d30be", + "id": "210073939" + }, + + { + "type": "Pagination", + "uri": "/external_experienced/fs/searchPagination/318c8bb6f553100021d223d9780d30be", + "id": "210073940" + }, + + { + "type": "Reset", + "uri": "/external_experienced/fs/resetSearch/318c8bb6f553100021d223d9780d30be", + "id": "210073941" + }, + + { + "type": "Configure_Expand", + "uri": "/external_experienced/fs/expandFacet/318c8bb6f553100021d223d9780d30be", + "id": "210073942" + }, + + { + "type": "Replace", + "uri": "/external_experienced/fs/replaceFacet/318c8bb6f553100021d223d9780d30be", + "id": "210073943" + }, + + { + "type": "Recover_Current_State", + "uri": "/external_experienced/fs/refreshFacet/318c8bb6f553100021d223d9780d30be", + "id": "210073944" + } + ], + + "uiLabelIdentifier": "11111$11" + } + ], + + "tabContent": true + }, + + "currentTime": 0, + "ecid": "318c8bb6f553100021d223d9780d30be", + "failedSignonCount": 0, + "header": + { + "id": "210074852", + "label": "Override label", + "widget": "header", + "languagePicker": + { + "id": "210074853", + "label": "English", + "widget": "commandButton", + "ecid": "commandButton.languagePicker", + "values": + [ + { + "label": "日本語 (日本)", + "uri": "/ja-JP/external_experienced", + "httpMethod": "GET", + "type": "BOOTSTRAP" + }, + + { + "label": "English", + "uri": "/en-US/external_experienced", + "httpMethod": "GET", + "type": "BOOTSTRAP" + } + ], + + "footerOrder": 0, + "rule": 1 + }, + + "authentication": + { + "id": "210074854", + "widget": "authentication", + "signInRequestUri": "/en-US/external_experienced/login", + "forgotPasswordRequestUri": "/en-US/external_experienced/initiatereset", + "createAccountRequestUri": "/en-US/external_experienced/register", + "resetPasswordRequestUri": "/en-US/external_experienced/passwordreset", + "resendAccountActivationEmailRequestUri": "/en-US/external_experienced/resendactivationinfo", + "logoUrl": "/external_experienced/assets/n1140199069", + "title": "Adobe Careers", + "showAsLink": true, + "state": "signIn", + "signOutUri": "/en-US/external_experienced/signout", + "successRedirectUri": "/en-US/external_experienced", + "uiLabelIdentifier": "11111$62" + }, + + "headerFashion": "JOURNEYMAN", + "titleAlignment": "BOTTOM", + "backgroundImageUrl": "URL(\"/external_experienced/assets/n1102812723\")", + "titleColor": "White" + }, + + "title": + { + "id": "210074849", + "widget": "titleStaticText", + "text": "Adobe Careers" + }, + + "windowTitle": "Search for Jobs", + "homeUri": "/en-US/external_experienced", + "localeAttributes": + { + "dateOrder": "MM/DD/YYYY", + "dateToolTip": "MM/DD/YYYY", + "decimalSeparator": ".", + "firstDayOfWeek": "sun", + "hourClock": "12", + "thousandsSeparator": ",", + "userLocale": "en_US", + "id": "210074851", + "workdayDayMonthRange": "MMM1 DD1 – MMM2 DD2, YYYY", + "workdayFullDateRange": "MMM1 DD1, YYYY1 – MMM2 DD2, YYYY2" + }, + + "openGraphAttributes": + { + "type": "website", + "title": "Search for Jobs", + "url": "https://adobe.wd5.myworkdayjobs.com/external_experienced/", + "imageUrl": "https://adobe.wd5.myworkdayjobs.com/external_experienced/assets/n1140199069", + "description": "Adobe Careers", + "id": "210074850" + }, + + "mode": "view", + "notifications": + { + "widget": "notifications", + "retrieveUnreadUri": "/notifications/unread", + "retrieveUnseenUri": "/notifications/unseen", + "totalCount": 0, + 
"unreadCount": 0, + "unseenCount": 0 + }, + + "requestUri": "/en-US/external_experienced/flowController", + "sessionSecureToken": "qb23ptdg9fjqmrgoa48cthotmi", + "uiLabels": + { + "WDRES.PAGINATION.Items": "{0} item(s)", + "WDRES.FACETEDSEARCH.SaveSearchDialogTitle": "Name your search", + "WDRES.FACETEDSEARCH.SaveSearchDialogHint": "Search name", + "WDRES.FACETEDSEARCH.SaveSearch": "Save Search", + "WDRES.FACETEDSEARCH.Compare": "Compare", + "WDRES.FACETEDSEARCH.ResultsCount": "Results ({0})", + "WDRES.TOOLTIP.Excel": "Export to Excel", + "WDRES.FACETEDSEARCH.ShowAll": "Show All", + "WDRES.FACETEDSEARCH.CurrentSearch": "Current Search", + "WDRES.FACETEDSEARCH.Search": "Search", + "WDRES.FACET.EXPANDCOLLAPSE": "Expand / Collapse {0}", + "WDRES.FACETEDSEARCH.SearchMore": "Search More…", + "WDRES.FACETEDSEARCH.NoFiltersFound": "No Filters Found", + "WDRES.FACETEDSEARCH.NoAvailableFacets": "No filters available for '{0}'", + "WDRES.FACETEDSEARCH.FilteredResults": "Filtered Results", + "WDRES.FACETEDSEARCH.AllFilters": "All Filters", + "WDRES.FACETEDSEARCH.ClearAll": "Clear All", + "WDRES.FACETEDSEARCH.NMoreLink": ", +{0} more...", + "WDRES.QUICKTIPS.FacetSearch.FacetList": "Use to filter the population when looking for specific groups of employees", + "WDRES.FACETEDSEARCH.FilterBy": "Filter By", + "WDRES.QUICKTIPS.FacetSearch.FacetListHeading": "Filter", + "WDRES.FACETEDSEARCH.SelectedFilters": "Selected Filters", + "WDRES.BUTTON.Done": "Done", + "WDRES.COMMON.Filter": "Filter", + "WDRES.BUTTON.Apply": "Apply", + "WDRES.FACETEDSEARCH.SavedSearchLoadedAriaLive": "Saved Search Loaded", + "WDRES.FACETEDSEARCH.SearchFiltersSavedAriaLive": "Search Term and Filters Saved", + "WDRES.FACETEDSEARCH.SkipToResults": "Skip To Results", + "WDRES.FACETEDSEARCH.SearchFiltersClearedAriaLive": "Search Term and Filters Cleared", + "WDRES.FACETEDSEARCH.QueryName": "Name", + "WDRES.FACETEDSEARCH.Open": "Open", + "WDRES.COMMON.Filters": "Filters", + "WDRES.FACETEDSEARCH.EmptyResultsPageTip": "Tip: try entering a different search term.", + "WDRES.FACETEDSEARCH.Continue": "Continue My Last Search", + "WDRES.PAGINATION.NoResults": "No search results found", + "WDRES.FACETEDSEARCH.Remove": "Remove", + "WDRES.FACETEDSEARCH.SavedSearchesWithCounter": "Saved Searches ({0})", + "WDRES.PAGINATION.Info": "{0}-{1} of {2}", + "WDRES.FACETEDSEARCH.SearchTextBox": "{0} Search Box", + "WDRES.PDF_PREVIEW.DownloadAttachment": "Download Attachment", + "WDRES.VALIDATOR.FieldIsRequired": "The field {0} is required and must have a value.", + "WDRES.PDF_PREVIEW.Number": "({0})", + "WDRES.FACETEDSEARCH.ClearAllFilters": "Clear all filters?", + "WDRES.PDF_PREVIEW.FileDownloaded": "Your file has been downloaded", + "WDRES.PDF_PREVIEW.NoPreviewAvailable": "No Preview Available", + "WDRES.PDF_PREVIEW.ErrorLoadingFile": "There was an error loading the file", + "WDRES.PDF_PREVIEW.CannotDownload": "This attachment cannot be downloaded.", + "WDRES.FACETEDSEARCH.SymanSyntax": "Boolean logic can be expressed using AND, OR, NOT, and (). \nExact phrases can be surrounded by double quotation marks. 
\nExample: (\"software engineer\" OR computer science) AND manager.", + "WDRES.BUTTON.Ok": "OK", + "WDRES.SEARCH.TEXT.Placeholder": "search", + "WDRES.FACETEDSEARCH.Save": "Save", + "WDRES.BUTTON.Close": "Close", + "WDRES.REPORTS.Report": "Report", + "WDRES.FACETEDSEARCH.FilteredResultsCount": "Filtered Results ({0})", + "WDRES.MASSACTIONS.TotalResults": "{0} Results", + "WDRES.MASSACTIONS.TotalResult": "1 Result", + "WDRES.DISTANCE.InvalidPostalCode": "Enter a valid postal code", + "WDRES.DISTANCE.City": "City", + "WDRES.DISTANCE.PostalCode": "postal code", + "WDRES.FACETEDSEARCH.NoResultsFoundFor": "No results found for '{0}'", + "WDRES.FACETEDSEARCH.People": "People", + "WDRES.FACETEDSEARCH.SavedSearches": "Saved Searches", + "WDRES.BUTTON.Cancel": "Cancel", + "WDRES.FACETEDSEARCH.PreFixPopupLabel": "Boolean Search Tips" + }, + + "userLanguageCode": "en-US", + "preferredLabelPosition": "atop", + "notificationAlertUri": "/notifications-alert", + "pmdFetchTime": 0, + "smdFetchTime": 0 +} \ No newline at end of file diff --git a/src/main/resources/podcast/BeyondPodFeeds.opml.xml b/src/main/resources/podcast/BeyondPodFeeds.opml.xml new file mode 100644 index 00000000..61b54aee --- /dev/null +++ b/src/main/resources/podcast/BeyondPodFeeds.opml.xml @@ -0,0 +1,46 @@ + + + + BeyondPod Feeds + Sun, 03 Feb 2019 21:12:30 GMT+08:00 + Sun, 03 Feb 2019 21:12:30 GMT+08:00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/main/scala/Solution.scala b/src/main/scala/Solution.scala new file mode 100644 index 00000000..cde48991 --- /dev/null +++ b/src/main/scala/Solution.scala @@ -0,0 +1,19 @@ + + +object Solution { + + def simpleArraySum(n: Int, ar: Array[Int]): Int = { + ar.reduce(_+_) + } + + def main(args: Array[String]) { + val sc = new java.util.Scanner (System.in); + var n = sc.nextInt(); + var ar = new Array[Int](n); + for(ar_i <- 0 to n-1) { + ar(ar_i) = sc.nextInt(); + } + val result = simpleArraySum(n, ar); + println(result) + } +} diff --git a/src/main/scala/contexRun.sc b/src/main/scala/contexRun.sc new file mode 100644 index 00000000..a4d981a0 --- /dev/null +++ b/src/main/scala/contexRun.sc @@ -0,0 +1,22 @@ +object contexRun { + abstract class Drink + abstract class SoftDrink() extends Drink + abstract class Juice() extends Drink + case class Cola() extends SoftDrink + case class Sprite() extends SoftDrink + case class Orange() extends Juice + + val al = List(Cola(), Cola()) //> al : List[contexRun.Cola] = List(Cola(), Cola()) + + val bl = List(Sprite()) //> bl : List[contexRun.Sprite] = List(Sprite()) + + val cl = al ++ bl ++ List(Orange()) //> cl : List[Product with Serializable with contexRun.Drink] = List(Cola(), Co + //| la(), Sprite(), Orange()) + + def print[A >: SoftDrink](list: List[A]): String = { + list.mkString(", ") + } //> print: [A >: contexRun.SoftDrink](list: List[A])String + + print(cl) //> res0: String = Cola(), Cola(), Sprite(), Orange() + +} \ No newline at end of file diff --git a/src/main/scala/ground/learning/App/FetchJavascriptStyleGuide.scala b/src/main/scala/ground/learning/App/FetchJavascriptStyleGuide.scala new file mode 100644 index 00000000..ba41f94c --- /dev/null +++ b/src/main/scala/ground/learning/App/FetchJavascriptStyleGuide.scala @@ -0,0 +1,30 @@ +package ground.learning.App + +import scalaj.http._ + +import java.nio.file.{ Paths, Files } +import java.nio.charset.StandardCharsets +import org.jsoup.Jsoup +import org.jsoup.select.Elements + +object FetchJavascriptStyleGuide extends 
App { + val file = Paths.get("response.txt") + def downloadAndSave() = { + val response: HttpResponse[String] = Http("https://github.com/airbnb/javascript/blob/master/README.md").asString + val content = new String(response.body.getBytes(StandardCharsets.UTF_8), StandardCharsets.UTF_8) + Files.write(file, content.getBytes(StandardCharsets.UTF_8)) + Files.write(Paths.get("response_meta.txt"), response.cookies.toString().getBytes(StandardCharsets.UTF_8)) + } + + def parseHtml() = { + val html = new String(Files.readAllBytes(file), StandardCharsets.UTF_8) + val doc = Jsoup.parse(html); + val links = doc.select("""div#readme"""); + import scala.collection.JavaConverters._ + links.asScala.toList.flatMap(elem => elem.select("li > p").asScala.map(element => element.text())).zipWithIndex + } + + val result = parseHtml().filter(_._1.length() > 10).map( item => item._2 + "\t:\t" + item._1) + result.foreach(println) + +} \ No newline at end of file diff --git a/src/main/scala/ground/learning/App/FetchWebPage.scala b/src/main/scala/ground/learning/App/FetchWebPage.scala new file mode 100644 index 00000000..2cbd555b --- /dev/null +++ b/src/main/scala/ground/learning/App/FetchWebPage.scala @@ -0,0 +1,19 @@ +package ground.learning.App + +object FetchWebPage extends App { + + import scalaj.http._ + + import java.nio.file.{ Paths, Files } + import java.nio.charset.StandardCharsets + + val response: HttpResponse[String] = Http("https://adobe.wd5.myworkdayjobs.com/en-US/external_experienced/").asString + Files.write(Paths.get("response.txt"), response.body.getBytes(StandardCharsets.UTF_8)) + Files.write(Paths.get("response_meta.txt"), response.cookies.toString().getBytes(StandardCharsets.UTF_8)) + + // response.body + // response.code + // response.headers + // response.cookies + +} \ No newline at end of file diff --git a/src/main/scala/ground/learning/App/JavascriptGuidelines.md b/src/main/scala/ground/learning/App/JavascriptGuidelines.md new file mode 100644 index 00000000..e899cd16 --- /dev/null +++ b/src/main/scala/ground/learning/App/JavascriptGuidelines.md @@ -0,0 +1,135 @@ +* 0 : 1.1 Primitives: When you access a primitive type you work directly on its value. +* 1 : 1.2 Complex: When you access a complex type you work on a reference to its value. +* 2 : 2.1 Use const for all of your references; avoid using var. eslint: prefer-const, no-const-assign +* 3 : 2.2 If you must reassign references, use let instead of var. eslint: no-var jscs: disallowVar +* 4 : 2.3 Note that both let and const are block-scoped. +* 5 : 3.1 Use the literal syntax for object creation. eslint: no-new-object +* 6 : 3.2 Use computed property names when creating objects with dynamic property names. +* 7 : 3.3 Use object method shorthand. eslint: object-shorthand jscs: requireEnhancedObjectLiterals +* 8 : 3.4 Use property value shorthand. eslint: object-shorthand jscs: requireEnhancedObjectLiterals +* 9 : 3.5 Group your shorthand properties at the beginning of your object declaration. +* 10 : 3.6 Only quote properties that are invalid identifiers. eslint: quote-props jscs: disallowQuotedKeysInObjects +* 11 : 3.7 Do not call Object.prototype methods directly, such as hasOwnProperty, propertyIsEnumerable, and isPrototypeOf. +* 12 : 3.8 Prefer the object spread operator over Object.assign to shallow-copy objects. Use the object rest operator to get a new object with certain properties omitted. +* 13 : 4.1 Use the literal syntax for array creation. 
eslint: no-array-constructor +* 14 : 4.2 Use Array#push instead of direct assignment to add items to an array. +* 15 : 4.3 Use array spreads ... to copy arrays. +* 16 : 4.4 To convert an array-like object to an array, use spreads ... instead of Array.from. +* 17 : 4.5 Use Array.from instead of spread ... for mapping over iterables, because it avoids creating an intermediate array. +* 18 : 4.6 Use return statements in array method callbacks. It’s ok to omit the return if the function body consists of a single statement returning an expression without side effects, following 8.2. eslint: array-callback-return +* 19 : 4.7 Use line breaks after open and before close array brackets if an array has multiple lines +* 20 : 5.1 Use object destructuring when accessing and using multiple properties of an object. eslint: prefer-destructuring jscs: requireObjectDestructuring +* 21 : 5.2 Use array destructuring. eslint: prefer-destructuring jscs: requireArrayDestructuring +* 22 : 5.3 Use object destructuring for multiple return values, not array destructuring. jscs: disallowArrayDestructuringReturn +* 23 : 6.1 Use single quotes '' for strings. eslint: quotes jscs: validateQuoteMarks +* 24 : 6.2 Strings that cause the line to go over 100 characters should not be written across multiple lines using string concatenation. +* 25 : 6.3 When programmatically building up strings, use template strings instead of concatenation. eslint: prefer-template template-curly-spacing jscs: requireTemplateStrings +* 26 : 6.5 Do not unnecessarily escape characters in strings. eslint: no-useless-escape +* 27 : 7.1 Use named function expressions instead of function declarations. eslint: func-style jscs: disallowFunctionDeclarations +* 28 : 7.2 Wrap immediately invoked function expressions in parentheses. eslint: wrap-iife jscs: requireParenthesesAroundIIFE +* 29 : 7.4 Note: ECMA-262 defines a block as a list of statements. A function declaration is not a statement. +* 30 : 7.5 Never name a parameter arguments. This will take precedence over the arguments object that is given to every function scope. +* 31 : 7.6 Never use arguments, opt to use rest syntax ... instead. eslint: prefer-rest-params +* 32 : 7.7 Use default parameter syntax rather than mutating function arguments. +* 33 : 7.8 Avoid side effects with default parameters. +* 34 : 7.9 Always put default parameters last. +* 35 : 7.10 Never use the Function constructor to create a new function. eslint: no-new-func +* 36 : 7.11 Spacing in a function signature. eslint: space-before-function-paren space-before-blocks +* 37 : 7.12 Never mutate parameters. eslint: no-param-reassign +* 38 : 7.13 Never reassign parameters. eslint: no-param-reassign +* 39 : 7.14 Prefer the use of the spread operator ... to call variadic functions. eslint: prefer-spread +* 40 : 7.15 Functions with multiline signatures, or invocations, should be indented just like every other multiline list in this guide: with each item on a line by itself, with a trailing comma on the last item. eslint: function-paren-newline +* 41 : 8.1 When you must use an anonymous function (as when passing an inline callback), use arrow function notation. eslint: prefer-arrow-callback, arrow-spacing jscs: requireArrowFunctions +* 42 : 8.2 If the function body consists of a single statement returning an expression without side effects, omit the braces and use the implicit return. Otherwise, keep the braces and use a return statement. 
eslint: arrow-parens, arrow-body-style jscs: disallowParenthesesAroundArrowParam, requireShorthandArrowFunctions +* 43 : 8.3 In case the expression spans over multiple lines, wrap it in parentheses for better readability. +* 44 : 8.4 If your function takes a single argument and doesn’t use braces, omit the parentheses. Otherwise, always include parentheses around arguments for clarity and consistency. Note: it is also acceptable to always use parentheses, in which case use the “always” option for eslint or do not include disallowParenthesesAroundArrowParam for jscs. eslint: arrow-parens jscs: disallowParenthesesAroundArrowParam +* 45 : 8.5 Avoid confusing arrow function syntax (=>) with comparison operators (<=, >=). eslint: no-confusing-arrow +* 46 : 9.1 Always use class. Avoid manipulating prototype directly. +* 47 : 9.2 Use extends for inheritance. +* 48 : 9.3 Methods can return this to help with method chaining. +* 49 : 9.4 It’s okay to write a custom toString() method, just make sure it works successfully and causes no side effects. +* 50 : 9.5 Classes have a default constructor if one is not specified. An empty constructor function or one that just delegates to a parent class is unnecessary. eslint: no-useless-constructor +* 51 : 9.6 Avoid duplicate class members. eslint: no-dupe-class-members +* 52 : 10.1 Always use modules (import/export) over a non-standard module system. You can always transpile to your preferred module system. +* 53 : 10.2 Do not use wildcard imports. +* 54 : 10.3 And do not export directly from an import. +* 55 : 10.4 Only import from a path in one place. eslint: no-duplicate-imports +* 56 : 10.5 Do not export mutable bindings. eslint: import/no-mutable-exports +* 57 : 10.6 In modules with a single export, prefer default export over named export. eslint: import/prefer-default-export +* 58 : 10.7 Put all imports above non-import statements. eslint: import/first +* 59 : 10.8 Multiline imports should be indented just like multiline array and object literals. +* 60 : 10.9 Disallow Webpack loader syntax in module import statements. eslint: import/no-webpack-loader-syntax +* 61 : 11.1 Don’t use iterators. Prefer JavaScript’s higher-order functions instead of loops like for-in or for-of. eslint: no-iterator no-restricted-syntax +* 62 : 11.2 Don’t use generators for now. +* 63 : 11.3 If you must use generators, or if you disregard our advice, make sure their function signature is spaced properly. eslint: generator-star-spacing +* 64 : 12.1 Use dot notation when accessing properties. eslint: dot-notation jscs: requireDotNotation +* 65 : 12.2 Use bracket notation [] when accessing properties with a variable. +* 66 : 12.3 Use exponentiation operator ** when calculating exponentiations. eslint: no-restricted-properties. +* 67 : 13.1 Always use const or let to declare variables. Not doing so will result in global variables. We want to avoid polluting the global namespace. Captain Planet warned us of that. eslint: no-undef prefer-const +* 68 : 13.2 Use one const or let declaration per variable. eslint: one-var jscs: disallowMultipleVarDecl +* 69 : 13.3 Group all your consts and then group all your lets. +* 70 : 13.4 Assign variables where you need them, but place them in a reasonable place. +* 71 : 13.5 Don’t chain variable assignments. eslint: no-multi-assign +* 72 : 13.6 Avoid using unary increments and decrements (++, --). eslint no-plusplus +* 73 : 13.7 Avoid linebreaks before or after = in an assignment. If your assignment violates max-len, surround the value in parens. 
eslint operator-linebreak. +* 74 : 14.1 var declarations get hoisted to the top of their closest enclosing function scope, their assignment does not. const and let declarations are blessed with a new concept called Temporal Dead Zones (TDZ). It’s important to know why typeof is no longer safe. +* 75 : 14.2 Anonymous function expressions hoist their variable name, but not the function assignment. +* 76 : 14.3 Named function expressions hoist the variable name, not the function name or the function body. +* 77 : 14.4 Function declarations hoist their name and the function body. +* 78 : For more information refer to JavaScript Scoping & Hoisting by Ben Cherry. +* 79 : 15.2 Conditional statements such as the if statement evaluate their expression using coercion with the ToBoolean abstract method and always follow these simple rules: +* 80 : 15.3 Use shortcuts for booleans, but explicit comparisons for strings and numbers. +* 81 : 15.5 Use braces to create blocks in case and default clauses that contain lexical declarations (e.g. let, const, function, and class). eslint: no-case-declarations +* 82 : 15.6 Ternaries should not be nested and generally be single line expressions. eslint: no-nested-ternary +* 83 : 15.7 Avoid unneeded ternary statements. eslint: no-unneeded-ternary +* 84 : 15.8 When mixing operators, enclose them in parentheses. The only exception is the standard arithmetic operators (+, -, *, & /) since their precedence is broadly understood. eslint: no-mixed-operators +* 85 : 16.1 Use braces with all multi-line blocks. eslint: nonblock-statement-body-position +* 86 : 16.2 If you're using multi-line blocks with if and else, put else on the same line as your if block’s closing brace. eslint: brace-style jscs: disallowNewlineBeforeBlockStatements +* 87 : 16.3 If an if block always executes a return statement, the subsequent else block is unnecessary. A return in an else if block following an if block that contains a return can be separated into multiple if blocks. eslint: no-else-return +* 88 : 17.1 In case your control statement (if, while etc.) gets too long or exceeds the maximum line length, each (grouped) condition could be put into a new line. The logical operator should begin the line. +* 89 : 17.2 Don't use selection operators in place of control statements. +* 90 : 18.1 Use /** ... */ for multi-line comments. +* 91 : 18.2 Use // for single line comments. Place single line comments on a newline above the subject of the comment. Put an empty line before the comment unless it’s on the first line of a block. +* 92 : 18.3 Start all comments with a space to make it easier to read. eslint: spaced-comment +* 93 : 18.5 Use // FIXME: to annotate problems. +* 94 : 18.6 Use // TODO: to annotate solutions to problems. +* 95 : 19.1 Use soft tabs (space character) set to 2 spaces. eslint: indent jscs: validateIndentation +* 96 : 19.2 Place 1 space before the leading brace. eslint: space-before-blocks jscs: requireSpaceBeforeBlockStatements +* 97 : 19.3 Place 1 space before the opening parenthesis in control statements (if, while etc.). Place no space between the argument list and the function name in function calls and declarations. eslint: keyword-spacing jscs: requireSpaceAfterKeywords +* 98 : 19.4 Set off operators with spaces. eslint: space-infix-ops jscs: requireSpaceBeforeBinaryOperators, requireSpaceAfterBinaryOperators +* 99 : 19.5 End files with a single newline character. eslint: eol-last +* 100 : 19.6 Use indentation when making long method chains (more than 2 method chains). 
Use a leading dot, which emphasizes that the line is a method call, not a new statement. eslint: newline-per-chained-call no-whitespace-before-property +* 101 : 19.7 Leave a blank line after blocks and before the next statement. jscs: requirePaddingNewLinesAfterBlocks +* 102 : 19.8 Do not pad your blocks with blank lines. eslint: padded-blocks jscs: disallowPaddingNewlinesInBlocks +* 103 : 19.9 Do not add spaces inside parentheses. eslint: space-in-parens jscs: disallowSpacesInsideParentheses +* 104 : 19.10 Do not add spaces inside brackets. eslint: array-bracket-spacing jscs: disallowSpacesInsideArrayBrackets +* 105 : 19.11 Add spaces inside curly braces. eslint: object-curly-spacing jscs: requireSpacesInsideObjectBrackets +* 106 : 19.12 Avoid having lines of code that are longer than 100 characters (including whitespace). Note: per above, long strings are exempt from this rule, and should not be broken up. eslint: max-len jscs: maximumLineLength +* 107 : 20.1 Leading commas: Nope. eslint: comma-style jscs: requireCommaBeforeLineBreak +* 108 : 20.2 Additional trailing comma: Yup. eslint: comma-dangle jscs: requireTrailingComma +* 109 : 21.1 Yup. eslint: semi jscs: requireSemicolons +* 111 : 22.2 Strings: eslint: no-new-wrappers +* 112 : 22.3 Numbers: Use Number for type casting and parseInt always with a radix for parsing strings. eslint: radix no-new-wrappers +* 113 : 22.4 If for whatever reason you are doing something wild and parseInt is your bottleneck and need to use Bitshift for performance reasons, leave a comment explaining why and what you're doing. +* 114 : 22.5 Note: Be careful when using bitshift operations. Numbers are represented as 64-bit values, but bitshift operations always return a 32-bit integer (source). Bitshift can lead to unexpected behavior for integer values larger than 32 bits. Discussion. Largest signed 32-bit Int is 2,147,483,647: +* 115 : 22.6 Booleans: eslint: no-new-wrappers +* 116 : 23.1 Avoid single letter names. Be descriptive with your naming. eslint: id-length +* 117 : 23.2 Use camelCase when naming objects, functions, and instances. eslint: camelcase jscs: requireCamelCaseOrUpperCaseIdentifiers +* 118 : 23.3 Use PascalCase only when naming constructors or classes. eslint: new-cap jscs: requireCapitalizedConstructors +* 119 : 23.4 Do not use trailing or leading underscores. eslint: no-underscore-dangle jscs: disallowDanglingUnderscores +* 120 : 23.5 Don’t save references to this. Use arrow functions or Function#bind. jscs: disallowNodeTypes +* 121 : 23.6 A base filename should exactly match the name of its default export. +* 122 : 23.7 Use camelCase when you export-default a function. Your filename should be identical to your function’s name. +* 123 : 23.8 Use PascalCase when you export a constructor / class / singleton / function library / bare object. +* 124 : 23.9 Acronyms and initialisms should always be all capitalized, or all lowercased. +* 125 : 24.2 Do not use JavaScript getters/setters as they cause unexpected side effects and are harder to test, maintain, and reason about. Instead, if you do make accessor functions, use getVal() and setVal('hello'). +* 126 : 24.3 If the property/method is a boolean, use isVal() or hasVal(). +* 127 : 24.4 It’s okay to create get() and set() functions, but be consistent. +* 128 : 25.1 When attaching data payloads to events (whether DOM events or something more proprietary like Backbone events), pass an object literal (also known as a "hash") instead of a raw value. 
This allows a subsequent contributor to add more data to the event payload without finding and updating every handler for the event. For example, instead of: +* 130 : 26.1 Prefix jQuery object variables with a $. jscs: requireDollarBeforejQueryAssignment +* 131 : 26.2 Cache jQuery lookups. +* 132 : 26.4 Use find with scoped jQuery object queries. +* 133 : 28.2 Do not use TC39 proposals that have not reached stage 3. +* 134 : 29.1 Use Number.isNaN instead of global isNaN. eslint: no-restricted-globals +* 135 : 29.2 Use Number.isFinite instead of global isFinite. eslint: no-restricted-globals +* \ No newline at end of file diff --git a/src/main/scala/ground/learning/SO/GenericTypeApp.sc b/src/main/scala/ground/learning/SO/GenericTypeApp.sc new file mode 100644 index 00000000..b0664996 --- /dev/null +++ b/src/main/scala/ground/learning/SO/GenericTypeApp.sc @@ -0,0 +1,14 @@ +object GenericTypeApp extends App { + + abstract class Drink + abstract class SoftDrink() extends Drink + abstract class Juice() extends Drink + case class Cola() extends SoftDrink + case class Sprite() extends SoftDrink + + val al = List(Cola(), Cola()) + + val bl = List(Sprite()) + + val cl = al ++ bl +} \ No newline at end of file diff --git a/src/main/scala/ground/learning/Sam/ThreadApp.scala b/src/main/scala/ground/learning/Sam/ThreadApp.scala new file mode 100644 index 00000000..ae844432 --- /dev/null +++ b/src/main/scala/ground/learning/Sam/ThreadApp.scala @@ -0,0 +1,18 @@ +package ground.learning.Sam + +object ThreadApp extends App { + println("Main thread - begins") + val runnable: Runnable = () => println("hello world - from first thread") + + val thread = new Thread(runnable) + println("Main thread - spins first thread") + thread.start() + + val thread2 = new Thread(() => println("hello world - from second thread")) + println("Main thread - spins second thread") + thread2.start + thread.join() + thread2.join() + + println("Main thread - end") +} \ No newline at end of file diff --git a/src/main/scala/ground/learning/akka/httpserver/WebServer .scala b/src/main/scala/ground/learning/akka/httpserver/WebServer .scala new file mode 100644 index 00000000..0b7a4b82 --- /dev/null +++ b/src/main/scala/ground/learning/akka/httpserver/WebServer .scala @@ -0,0 +1,40 @@ +package ground.learning.akka.httpserver + +import akka.actor.ActorSystem +import akka.http.scaladsl.Http +import akka.http.scaladsl.model._ +import akka.http.scaladsl.server.Directives._ +import akka.stream.ActorMaterializer +import scala.io.StdIn + +object WebServer { + def main(args: Array[String]) { + + implicit val system = ActorSystem("my-system") + implicit val materializer = ActorMaterializer() + // needed for the future flatMap/onComplete in the end + implicit val executionContext = system.dispatcher + + /* + * Withoug materializer compiler would throw below issue. + * found : akka.http.scaladsl.server.Route (which expands to) akka.http.scaladsl.server.RequestContext => scala.concurrent.Future[akka.http.scaladsl.server.RouteResult] + * required: akka.stream.scaladsl.Flow[akka.http.scaladsl.model.HttpRequest,akka.http.scaladsl.model.HttpResponse,Any] + */ + + + val route = + path("hello") { + get { + complete(HttpEntity(ContentTypes.`text/html(UTF-8)`, "

<h1>Say hello to akka-http</h1>
")) + } + } + + val bindingFuture = Http().bindAndHandle(route, "localhost", 8080) + + println(s"Server online at http://localhost:8080/\nPress RETURN to stop...") + StdIn.readLine() // let it run until user presses return + bindingFuture + .flatMap(_.unbind()) // trigger unbinding from the port + .onComplete(_ => system.terminate()) // and shutdown when done + } +} \ No newline at end of file diff --git a/src/main/scala/ground/learning/fp/FreeMonad/Free2.scala b/src/main/scala/ground/learning/fp/FreeMonad/Free2.scala new file mode 100644 index 00000000..cb042848 --- /dev/null +++ b/src/main/scala/ground/learning/fp/FreeMonad/Free2.scala @@ -0,0 +1,7 @@ +package ground.learning.fp.FreeMonad + +object Free2 { + sealed trait Free[F[_], A] { + + } +} \ No newline at end of file diff --git a/src/main/scala/ground/learning/fp/FreeMonad/FreeMonad.sc b/src/main/scala/ground/learning/fp/FreeMonad/FreeMonad.sc new file mode 100644 index 00000000..4cf42d38 --- /dev/null +++ b/src/main/scala/ground/learning/fp/FreeMonad/FreeMonad.sc @@ -0,0 +1,122 @@ +package ground.learning.fp.FreeMonad +object FreeMonad { + println("https://www.youtube.com/watch?v=7xSfLPD6tiQ") + println("Pure Functional Database Programming with Fixpoint Types—Rob Norris") + println("https://tpolecat.github.io/presentations/sw2016/slides.html#4") + //A recursive data type for professors and their Ph.D. students + object attempt_1 { + case class Prof( + name: String, + year: Int, + students: List[Prof]) + /* + How do we store the auto-generated primary key, Simple INT won't be right way to do it + CREATE TABLE prof ( + id INTEGER IDENTITY, + parent INTEGER NULL, + name VARCHAR NOT NULL, + year INTEGER NOT NULL, + FOREIGN KEY(parent) REFERENCES prof(id) + ) + */ + } + object attempt_2 { + /** + * id: Option[Int] - Represents three possibilities + * 1. Just constructed objected, no ID in DB + * 2. Just retrieved from DB, there is an id + * 3. 
In some computation, where DB-ID is not important for computation + */ + case class Prof( + id: Option[Int], + name: String, + year: Int, + students: List[Prof]) + /* + CREATE TABLE prof ( + id INTEGER IDENTITY, + parent INTEGER NULL, + name VARCHAR NOT NULL, + year INTEGER NOT NULL, + FOREIGN KEY(parent) REFERENCES prof(id) + ) + */ + } + object attempt_3 { + /** + * id: Let us handled ID separately + * But it creates new problem as we can't keep "students: List[Prof]" since it looses id + * it should be stored as "students: List[(Int, Prof)]" + */ + case class Prof( + name: String, + year: Int, + students: List[Prof]) + + type IdProf = (Int, Prof) + } + object attempt_4 { + /** + * (Int, Prof) - Was not sure if it make sense, alternatively we can avoid to find the Type by parameterizing + */ + case class ProfF[A]( + name: String, + year: Int, + students: List[A]) + + object attempt_4_1 { + //Here there is new problem - class ProfF takes type parameters + type Prof = ProfF[ProfF] + type IdProf = (Int, ProfF[(Int, ProfF)]) + } + object attempt_4_2 { + //Actually it is infinitely recursive + type Prof = ProfF[ProfF[ProfF[ProfF[ProfF]]]] + type IdProf = (Int, ProfF[(Int, ProfF[ProfF[ProfF[ProfF]]])]) + + } + /** Conclusion: Type aliases can't be recursive, but classes can be recursive */ + } + + object attempt_5 { + /** + * (Int, Prof) - Was not sure if it make sense, alternatively we can avoid to find the Type by parameterizing + */ + case class ProfF[A]( + name: String, + year: Int, + students: List[A]) + + object attempt_5_1 { + case class Prof(value: ProfF[Prof]) + case class IdProf(id: Int, prof: ProfF[IdProf]) + } + + object attempt_5_2 { + case class Prof[F[_]](value: F[Prof[F]]) + case class IdProf[F[_]](id: Int, prof: F[IdProf[F]]) + } + + } + + import scalaz._ + //Above types could be generalized further using following + case class Fix[F[_]](unfix: F[Fix[F]]) + case class Cofree[F[_], A](head: A, tail: F[Cofree[F, A]]) + case class Free[F[_], A](head: A \/ F[Free[F, A]]) + + object using_other_combinator { + case class CofreeF[F[_], A, B](head: A, tail: F[B]) + case class FreeF[F[_], A,B](resume: A \/ F[B]) + + //Below won't compile but ? means "I don't worry about any type" + //In typed method signature if _ comes, it means "partially applied" + type Cofree[F[_], A] = Fix[CofreeF[F, A, ?]] + type Free[F[_], A] = Fix[FreeF[F, A, ?]] + + //Annotated and un-annotated ProfF trees. 
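    //As a hedged illustration (assuming a ProfF like the one in the attempts above is
    //in scope, and using the Fix and Cofree case classes defined earlier), a professor
    //with no students could be written as:
    //  un-annotated: Fix(ProfF[Fix[ProfF]]("Knuth", 1963, Nil))
    //  annotated:    Cofree(1, ProfF[Cofree[ProfF, Int]]("Knuth", 1963, Nil))
    //i.e. Cofree is Fix plus an extra value (here the database id) carried at every node.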
+ type Prof = Fix[ProfF] + type IdProf = Cofree[ProfF, Int] + + } +} \ No newline at end of file diff --git a/src/main/scala/StateMonadExample2.sc b/src/main/scala/ground/learning/fp/Monad/State2/StateMonadExample2.sc similarity index 53% rename from src/main/scala/StateMonadExample2.sc rename to src/main/scala/ground/learning/fp/Monad/State2/StateMonadExample2.sc index 05207505..871bc90b 100644 --- a/src/main/scala/StateMonadExample2.sc +++ b/src/main/scala/ground/learning/fp/Monad/State2/StateMonadExample2.sc @@ -1,4 +1,4 @@ - +package ground.learning.fp.Monad.State2 import scalaz.Scalaz //http://scalaz.github.io/scalaz/scalaz-2.9.1-6.0.2/doc.sxr/scalaz/example/ExampleState.scala.html#772357 @@ -26,7 +26,8 @@ object StateMonadExample2 { } } - val increment = (x : Int) => (x, x + 1) //> increment : Int => (Int, Int) = + val increment = (x : Int) => (x, x + 1) //> increment : Int => (Int, Int) = ground.learning.fp.Monad.State2.StateMonadE + //| xample2$$$Lambda$3/1213415012@e2d56bf def number[A](tree : Tree[A]) : State[Int, Tree[(A, Int)]] = tree match { case Leaf(x) => State { s => (s + 1, Leaf(x, s)) } @@ -37,14 +38,16 @@ object StateMonadExample2 { (u._1, Branch(t._2, u._2)) } } - } //> number: [A](tree: StateMonadExample2.Tree[A])StateMonadExample2.State[Int,S - //| tateMonadExample2.Tree[(A, Int)]] + } //> number: [A](tree: ground.learning.fp.Monad.State2.StateMonadExample2.Tree[A + //| ])ground.learning.fp.Monad.State2.StateMonadExample2.State[Int,ground.learn + //| ing.fp.Monad.State2.StateMonadExample2.Tree[(A, Int)]] val tree = Branch(Leaf("one"), Branch(Leaf("two"), Branch(Leaf("three"), Branch(Leaf("four"), Leaf("five"))))) - //> tree : StateMonadExample2.Branch[String] = Branch(Leaf(one),Branch(Leaf(tw - //| o),Branch(Leaf(three),Branch(Leaf(four),Leaf(five))))) + //> tree : ground.learning.fp.Monad.State2.StateMonadExample2.Branch[String] = + //| Branch(Leaf(one),Branch(Leaf(two),Branch(Leaf(three),Branch(Leaf(four),Lea + //| f(five))))) - number(tree).run(1) //> res0: (Int, StateMonadExample2.Tree[(String, Int)]) = (6,Branch(Leaf((one,1 - //| )),Branch(Leaf((two,2)),Branch(Leaf((three,3)),Branch(Leaf((four,4)),Leaf(( - //| five,5))))))) + number(tree).run(1) //> res0: (Int, ground.learning.fp.Monad.State2.StateMonadExample2.Tree[(String + //| , Int)]) = (6,Branch(Leaf((one,1)),Branch(Leaf((two,2)),Branch(Leaf((three, + //| 3)),Branch(Leaf((four,4)),Leaf((five,5))))))) } \ No newline at end of file diff --git a/src/main/scala/state.sc b/src/main/scala/ground/learning/fp/Monad/State2/state.sc similarity index 68% rename from src/main/scala/state.sc rename to src/main/scala/ground/learning/fp/Monad/State2/state.sc index 96544f8a..ed6b015b 100644 --- a/src/main/scala/state.sc +++ b/src/main/scala/ground/learning/fp/Monad/State2/state.sc @@ -1,3 +1,4 @@ +package ground.learning.fp.Monad.State2 //http://patterngazer.blogspot.sg/2012/01/changing-my-state-of-mind-with-monad-in.html object state { @@ -42,10 +43,9 @@ object state { _ <- push(7) _ <- push(9) _ <- pop - } yield () //> result : state.StateMonad[Unit,List[Int]] = state$StateMonad$$anon$1@6a38e5 - //| 7f + } yield () - println(result(List(1))._2) //> List(7, 5, 3, 1) + println(result(List(1))._2) val otherResult = push(3).flatMap { _ => push(5).flatMap { _ => @@ -55,9 +55,8 @@ object state { } } } - } //> otherResult : state.StateMonad[Unit,List[Int]] = state$StateMonad$$anon$1@ - //| 5c3bd550 + } - println(otherResult(List(1))._2) //> List(7, 5, 3, 1) + println(otherResult(List(1))._2) } \ No newline at end of file diff 
--git a/src/main/scala/ground/learning/json/Json4sJacksonParser.scala b/src/main/scala/ground/learning/json/Json4sJacksonParser.scala new file mode 100644 index 00000000..fe194150 --- /dev/null +++ b/src/main/scala/ground/learning/json/Json4sJacksonParser.scala @@ -0,0 +1,38 @@ +package ground.learning.json + +import org.json4s._ +import org.json4s.jackson.JsonMethods._ +import org.json4s.JsonDSL._ +import com.fasterxml.jackson.databind.ObjectMapper +import java.util.HashMap + +object Json4sJacksonParser extends App { + + val jsonString = """ + { "name": "joe", + "children": [ + { + "name": "Mary", + "age": 5 + }, + { + "name": "Mazy", + "age": 3 + } + ] + } + """ + + val objectMapper = new ObjectMapper(); + val myMap = objectMapper.readValue(jsonString, classOf[HashMap[String, Any]]); + System.out.println("Map is: " + myMap); + + val jsonObject = parse(jsonString) + + println(jsonObject.getClass) + println(jsonObject) + + //implicit def something(t: JValue): MonadicJValue = ??? + + println(jsonObject \ "name") +} \ No newline at end of file diff --git a/src/main/scala/ground/learning/json/JsonNativeParser.scala b/src/main/scala/ground/learning/json/JsonNativeParser.scala new file mode 100644 index 00000000..f7120a15 --- /dev/null +++ b/src/main/scala/ground/learning/json/JsonNativeParser.scala @@ -0,0 +1,10 @@ +package ground.learning.json + +import scala.util.parsing.json.JSON + +object JsonNativeParser extends App { + val json = """{"name" : "value"}""" + val jsonObject = JSON.parseRaw(json) + println(JSON.getClass) + println(JSON.parseRaw(json)) +} \ No newline at end of file diff --git a/src/main/scala/ground/learning/parser/MiniParser.scala b/src/main/scala/ground/learning/parser/MiniParser.scala new file mode 100644 index 00000000..ed3c3b75 --- /dev/null +++ b/src/main/scala/ground/learning/parser/MiniParser.scala @@ -0,0 +1,253 @@ +package ground.learning.parser + +import scala.util.parsing.combinator.RegexParsers +import scala.collection.mutable.Map + +object MiniParser { + + def main(args: Array[String]): Unit = { + val expr = """ + def mod(x, y) ={ + if(x < y){ + x + }else{ + mod(x - y, y) + } +}; + +def fizzbuzz(x) ={ + def impl(y)={ + if(y < x + 1){ + if(mod(y,3) < 1){ + if(mod(y,5) < 1){ + println("fizzbuzz") + }else{ + println("fizz") + } + }else if(mod(y,5)< 1){ + println("buzz") + }else{ + println(y) + }; + impl(y + 1) + }else{ + message("Finish" + x); + 0 + } + }; + impl(1) +}; +fizzbuzz(20); +""" + val parser = new MiniParser + val ast = parser.parse(expr).get + + val visitor = new ExprVisitor + val env = new Environment(None) + env.set("println", PrintLineFunc) + env.set("message", MessageFunc) + var result = visitor.eval(env, ast); + } +} + +trait EmbeddedFunc{ + def exec(params:List[Any]):Any +} + +object PrintLineFunc extends EmbeddedFunc{ + def exec(params:List[Any])={ + params match{ + case Nil =>{ + println() + Nil + } + case v::_ =>{ + println(v.toString) + v + } + } + } +} + +object MessageFunc extends EmbeddedFunc{ + def exec(params:List[Any])={ + params match{ + case Nil => {} + case v::_ =>{ + javax.swing.JOptionPane.showMessageDialog(null, v.toString) + v + } + } + } +} + +class Environment(parent:Option[Environment]){ + val variables = Map[String, Any]() + def get(key:String):Any = { + if(variables.contains(key)){ + variables(key) + }else{ + parent match{ + case Some(p) => p.get(key) + case None => throw new Exception("symbol'%s' not found".format(key)) + } + } + + } + def set(key:String, value:Any){ + variables(key) = value + } +} + + +class ExprVisitor{ + 
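  /** eval walks the AST produced by MiniParser: Lines evaluates its expressions in a
   *  fresh child Environment, Ident/Assignment read and write that environment (lookups
   *  fall back through the parent chain), FuncDef stores a Closure over the defining
   *  environment, and FuncCall either applies such a Closure with its parameters bound
   *  in a new child scope or delegates to an EmbeddedFunc (println/message, registered
   *  in main). */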
def eval(env:Environment, ast:AST):Any = { + def visit(ast:AST):Any = { + ast match{ + case Lines(exprs) =>{ + val local = new Environment(Some(env)) + exprs.foldLeft(Unit:Any){(result, x) => eval(local, x)} + } + case IfExpr(cond, pos, neg) =>{ + visit(cond) match{ + case true => visit(pos) + case false => visit(neg) + } + } + case LessOp(left, right) =>{ + (visit(left), visit(right)) match{ + case (lval:Int, rval:Int) => lval < rval + } + } + case AddOp(left, right) =>{ + (visit(left), visit(right)) match{ + case (lval:Int, rval:Int) => lval + rval + case (lval:String, rval) => lval + rval + case (lval, rval:String) => lval + rval + } + } + case SubOp(left, right) =>{ + (visit(left), visit(right)) match{ + case (lval:Int, rval:Int) => lval - rval + } + } + case MulOp(left, right) =>{ + (visit(left), visit(right)) match{ + case (lval:Int, rval:Int) => lval * rval + } + } + case IntVal(value) => value + case StringVal(value) => value + case Ident(name) => { + env.get(name) + } + case Assignment(vr, value) =>{ + val v = visit(value) + env.set(vr, v) + } + case FuncDef(name, func) =>{ + env.set(name, Closure(env, func)) + } + case FuncCall(func, params) =>{ + visit(func) match{ + case f:Closure => { + val local = new Environment(Some(f.env)) + (f.func.params zip params).foreach{case(pn, a) => + local.set(pn, visit(a)) + } + eval(local, f.func.proc) + } + case f:EmbeddedFunc => { + f.exec(params.map(visit)) + } + } + } + case f:Func =>{ + Closure(env, f) + } + } + } + visit(ast) + } +} + +trait AST +case class Lines(exprs:List[AST]) extends AST +case class IfExpr(cond:AST, pos:AST, neg:AST) extends AST +case class LessOp(left: AST, right:AST) extends AST +case class AddOp(left: AST, right:AST) extends AST +case class SubOp(left: AST, right:AST) extends AST +case class MulOp(left: AST, right:AST) extends AST +case class StringVal(value: String) extends AST +case class IntVal(value: Int) extends AST +case class Ident(name: String) extends AST +case class Assignment(variable: String, value: AST) extends AST + +case class Func(params:List[String], proc:AST) extends AST +case class FuncDef(name: String, func: Func) extends AST +case class FuncCall(func:AST, params:List[AST]) extends AST + +case class Closure(env: Environment, func: Func) + +class MiniParser extends RegexParsers{ + //lines ::= expr {";" expr} [";"] + def lines: Parser[AST] = repsep(line, ";") <~ opt(";")^^Lines + def line: Parser[AST] = expr | assignment | funcDef + //expr ::= cond | if + def expr: Parser[AST] = condOp|ifExpr + //if ::= "if" "(" expr ")" expr "else" expr + def ifExpr: Parser[AST] = "if"~"("~>expr~")"~expr~"else"~expr^^{ + case cond~_~pos~_~neg => IfExpr(cond, pos, neg) + } + //cond ::= add {"<" add} + def condOp: Parser[AST] = chainl1(add, + "<"^^{_ => (left:AST, right:AST) => LessOp(left, right)}) + //add ::= term {"+" term | "-" term}. 
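  //(chainl1 is the standard combinator inherited from scala.util.parsing.combinator.Parsers:
  // chainl1(p, q) parses p (q p)* and folds the results left-associatively, so "1+2-3"
  // becomes SubOp(AddOp(IntVal(1), IntVal(2)), IntVal(3)).)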
+ def add: Parser[AST] = chainl1(term, + "+"^^{_ => (left:AST, right:AST) => AddOp(left, right)}| + "-"^^{_ => (left:AST, right:AST) => SubOp(left, right)}) + //term ::= factor {"*" factor} + def term : Parser[AST] = chainl1(funcCall, + "*"^^{_ => (left:AST, right:AST) => MulOp(left, right)}) + def funcCall: Parser[AST] = factor~rep("("~>repsep(expr, ",")<~")")^^{ + case fac~Nil => fac + case fac~params =>{ + //関数 + params.foldLeft(fac)(FuncCall(_,_)) + } + } + + //factor ::= intLiteral | stringLiteral | "(" expr ")" |funcLiteral + def factor: Parser[AST] = intLiteral | stringLiteral | ident | + "("~>expr<~")" | funcLiteral + //funcLiteral ::= "{" [[ident {"," ident}] "=>"] lines "}" + def funcLiteral: Parser[AST] = "{"~>opt(repsep(ident, ",")<~"=>")~lines<~"}"^^{ + case p~x => { + p match{ + case Some(param) => Func(param.map(_.name), x) + case None => x + } + } + } + //intLiteral ::= ["1"-"9"] {"0"-"9"} + def intLiteral : Parser[AST] = """[1-9][0-9]*|0""".r^^{ + value => IntVal(value.toInt)} + //stringLiteral ::= "\"" {"a-zA-Z0-9.."} "\"" + def stringLiteral : Parser[AST] = "\""~>"""[a-zA-Z0-9:*/+\- ]*""".r<~"\""^^StringVal + def ident :Parser[Ident] = """[A-Za-z_][a-zA-Z0-9]*""".r^?{ + case n if n != "if" && n!= "val" && n != "def" => n}^^Ident + def assignment:Parser[Assignment] = "val"~>ident~"="~expr^^{ + case v~_~value => Assignment(v.name, value) + } + def funcDef:Parser[FuncDef] = "def"~>ident~opt("("~>repsep(ident, ",")<~")")~"="~expr^^{ + case v~params~_~proc => { + val p = params match{ + case Some(pr) => pr + case None => Nil + } + FuncDef(v.name, Func(p.map(_.name), proc)) + } + } + + def parse(str:String) = parseAll(lines, str) +} diff --git a/src/main/scala/ground/learning/parser/ParserCombinator.scala b/src/main/scala/ground/learning/parser/ParserCombinator.scala new file mode 100644 index 00000000..8abd895e --- /dev/null +++ b/src/main/scala/ground/learning/parser/ParserCombinator.scala @@ -0,0 +1,49 @@ +//package ground.learning.parser +// +//import scala.util.Try +// +//object ParserCombinator { +// +// val syntax = """ +// (3 + 4) +// (3 * 4) +// assignment := variable = addExpr || multExpr +// """ +// +// type Parser[+T] = String => T +// +// trait Parser2[+T] { +// def parse(input: String): T +// } +// +// def numberParser1(input: String): (Option[Int], String) = { +// val digit = input.toList.takeWhile(_.isDigit).mkString +// (Try(Integer.valueOf(digit).toInt).toOption, input.replaceFirst(digit, "")) +// } +// +// def numberParser(): Parser[(Option[Int], String)] = { input: String => +// val digit = input.toList.takeWhile(_.isDigit).mkString +// (Try(Integer.valueOf(digit).toInt).toOption, input.replaceFirst(digit, "")) +// } +// +// +// def opParser(): Parser[(Option[Char], String)] = { input: String => +// if (input.charAt(0) == '+' || input.charAt(0) == '*') { +// (Some(input.charAt(0)), input.substring(1)) +// } else { +// (None, input) +// } +// } +// +// def expParser(input: String): (List[Int], Char) = { +// val number = numberParser(input) +// val operator = opParser(number._2) +// val number2 = numberParser(operator._2) +// (List(number._1.get, number2._1.get), operator._1.get) +// } +// +// def main(args: Array[String]) { +// println(expParser("3+2")) +// } +// +//} \ No newline at end of file diff --git a/src/main/scala/ground/learning/text/Cosine.scala b/src/main/scala/ground/learning/text/Cosine.scala new file mode 100644 index 00000000..e121aa55 --- /dev/null +++ b/src/main/scala/ground/learning/text/Cosine.scala @@ -0,0 +1,19 @@ +//package 
ground.learning.text +//import java.util.Arrays; +//import java.util.Map; +//import java.util.function.Function; +//import org.apache.commons.text.similarity.CosineSimilarity; +//import java.util.stream.Collectors._; +// +//object Cosine extends App { +// val record = "The package descriptions in the JavaDoc give an overview of the available features and various project reports are provided."; +// val duplicate_record = "The descriptions of the package in the JavaDoc give an overview of features and various project reports are provided."; +// +// System.out.println("starting"); +// val r1 = Arrays.stream(record.split(" ")).collect(groupingBy(Function.identity(), summingInt(x => 1))); +// val r2 = Arrays.stream(duplicate_record.split(" ")).collect(groupingBy(Function.identity(), summingInt(x => 1))); +// val similarity = new CosineSimilarity(); +// val t = similarity.cosineSimilarity(r1, r2); +// System.out.println(t); +// +//} \ No newline at end of file diff --git a/src/main/scala/ground/worksheet/scalacheck/InterleaveSpec.scala b/src/main/scala/ground/worksheet/scalacheck/InterleaveSpec.scala index ba389335..98b16d3a 100644 --- a/src/main/scala/ground/worksheet/scalacheck/InterleaveSpec.scala +++ b/src/main/scala/ground/worksheet/scalacheck/InterleaveSpec.scala @@ -4,6 +4,7 @@ import Interleaver._ import org.scalatest._ import prop._ import org.scalacheck.Prop.{ AnyOperators, forAll, all } +import java.lang.Math class InterleaveSpec extends PropSpec with Checkers { diff --git a/src/main/scala/ground/worksheet/scalacheck/InterleaveSpecUsingPropertyChecks.scala b/src/main/scala/ground/worksheet/scalacheck/InterleaveSpecUsingPropertyChecks.scala index 7b174472..cb9b0f18 100644 --- a/src/main/scala/ground/worksheet/scalacheck/InterleaveSpecUsingPropertyChecks.scala +++ b/src/main/scala/ground/worksheet/scalacheck/InterleaveSpecUsingPropertyChecks.scala @@ -5,23 +5,25 @@ import org.scalatest._ import prop._ import org.scalacheck.Prop.{ AnyOperators, forAll, all } - import Interleaver._ - import org.scalatest._ - import prop._ - import Matchers._ - - class InterleaveSpecUsingPropertyChecks extends PropSpec with PropertyChecks { - - property("the interleave method must interleave lists") { - forAll { (l1: List[Int], l2: List[Int]) => - val res = interleave(l1,l2) - val is = (0 to Math.min(l1.length, - l2.length)-1).toList - l1.length + l2.length shouldBe res.length - l1 shouldBe is.map(i => res(2*i)) ++ - res.drop(2*l2.length) - l2 shouldBe is.map(i => res(2*i+1)) ++ - res.drop(2*l1.length) - } +import Interleaver._ +import org.scalatest._ +import prop._ +import Matchers._ +import java.lang.Math + +class InterleaveSpecUsingPropertyChecks extends PropSpec with PropertyChecks { + + property("the interleave method must interleave lists") { + forAll { (l1: List[Int], l2: List[Int]) => + val res = interleave(l1, l2) + val is = (0 to Math.min( + l1.length, + l2.length) - 1).toList + l1.length + l2.length shouldBe res.length + l1 shouldBe is.map(i => res(2 * i)) ++ + res.drop(2 * l2.length) + l2 shouldBe is.map(i => res(2 * i + 1)) ++ + res.drop(2 * l1.length) } - } \ No newline at end of file + } +} \ No newline at end of file diff --git a/src/main/scala/lamdbda.sc b/src/main/scala/lamdbda.sc deleted file mode 100644 index bdae3475..00000000 --- a/src/main/scala/lamdbda.sc +++ /dev/null @@ -1,14 +0,0 @@ -object lamdbda { - println("Welcome to the Scala worksheet") //> Welcome to the Scala worksheet - val x = () => 3 //> x : () => Int = lamdbda$$$Lambda$8/1641808846@13a57a3b - - val add : Int => Int = 
{ x => - x + 3 //> add : Int => Int = lamdbda$$$Lambda$9/972765878@6276ae34 - } - - val someInt = () => 3 //> someInt : () => Int = lamdbda$$$Lambda$10/2034688500@3c09711b - someInt() //> res0: Int = 3 - - //val someInt :=> Int = - -} \ No newline at end of file diff --git a/src/main/scala/pain.sc b/src/main/scala/pain.sc deleted file mode 100644 index b47484e7..00000000 --- a/src/main/scala/pain.sc +++ /dev/null @@ -1,20 +0,0 @@ -import scala.util.Either._ -import scala.util.Try._ -import scala.util.Try - -object pain { - - val listOfEmployeeId : List[String] = Nil - val findHike : String => Some[Int] = (x : String) => Some(0) - val updatePay : (String, Int) => Either[String, Int] = (x : String, y : Int) => if (x.trim().length() > 0) Right(10 + y) else Left("Sorry") - val updateDb : Int => Try[Boolean] = pay => Try ( pay > 100000000 ) - - - val t = for { - employee <- listOfEmployeeId - hike <- findHike(employee) - success <- updateDb(hike) - //newPay <- updatePay("t", 10) - } yield success - -} \ No newline at end of file diff --git a/src/main/webapp/random_numbers.html b/src/main/webapp/random_numbers.html new file mode 100644 index 00000000..bf640328 --- /dev/null +++ b/src/main/webapp/random_numbers.html @@ -0,0 +1,22 @@ + + + Random + + +
+ + + + diff --git a/src/main/webapp/static/d3js/bullet.js b/src/main/webapp/static/d3js/bullet.js new file mode 100644 index 00000000..5765a5b0 --- /dev/null +++ b/src/main/webapp/static/d3js/bullet.js @@ -0,0 +1,241 @@ +(function() { + +// Chart design based on the recommendations of Stephen Few. Implementation +// based on the work of Clint Ivy, Jamie Love, and Jason Davies. +// http://projects.instantcognition.com/protovis/bulletchart/ +d3.bullet = function() { + var orient = "left", // TODO top & bottom + reverse = false, + duration = 0, + ranges = bulletRanges, + markers = bulletMarkers, + measures = bulletMeasures, + width = 380, + height = 30, + tickFormat = null; + + // For each small multiple… + function bullet(g) { + g.each(function(d, i) { + var rangez = ranges.call(this, d, i).slice().sort(d3.descending), + markerz = markers.call(this, d, i).slice().sort(d3.descending), + measurez = measures.call(this, d, i).slice().sort(d3.descending), + g = d3.select(this); + + // Compute the new x-scale. + var x1 = d3.scaleLinear() + .domain([0, Math.max(rangez[0], markerz[0], measurez[0])]) + .range(reverse ? [width, 0] : [0, width]); + + // Retrieve the old x-scale, if this is an update. + var x0 = this.__chart__ || d3.scaleLinear() + .domain([0, Infinity]) + .range(x1.range()); + + // Stash the new scale. + this.__chart__ = x1; + + // Derive width-scales from the x-scales. + var w0 = bulletWidth(x0), + w1 = bulletWidth(x1); + + // Update the range rects. + var range = g.selectAll("rect.range") + .data(rangez); + + range.enter().append("rect") + .attr("class", function(d, i) { return "range s" + i; }) + .attr("width", w0) + .attr("height", height) + .attr("x", reverse ? x0 : 0) + .transition() + .duration(duration) + .attr("width", w1) + .attr("x", reverse ? x1 : 0); + + range.transition() + .duration(duration) + .attr("x", reverse ? x1 : 0) + .attr("width", w1) + .attr("height", height); + + // Update the measure rects. + var measure = g.selectAll("rect.measure") + .data(measurez); + + measure.enter().append("rect") + .attr("class", function(d, i) { return "measure s" + i; }) + .attr("width", w0) + .attr("height", height / 3) + .attr("x", reverse ? x0 : 0) + .attr("y", height / 3) + .transition() + .duration(duration) + .attr("width", w1) + .attr("x", reverse ? x1 : 0); + + measure.transition() + .duration(duration) + .attr("width", w1) + .attr("height", height / 3) + .attr("x", reverse ? x1 : 0) + .attr("y", height / 3); + + // Update the marker lines. + var marker = g.selectAll("line.marker") + .data(markerz); + + marker.enter().append("line") + .attr("class", "marker") + .attr("x1", x0) + .attr("x2", x0) + .attr("y1", height / 6) + .attr("y2", height * 5 / 6) + .transition() + .duration(duration) + .attr("x1", x1) + .attr("x2", x1); + + marker.transition() + .duration(duration) + .attr("x1", x1) + .attr("x2", x1) + .attr("y1", height / 6) + .attr("y2", height * 5 / 6); + + // Compute the tick format. + var format = tickFormat || x1.tickFormat(8); + + // Update the tick groups. + var tick = g.selectAll("g.tick") + .data(x1.ticks(8), function(d) { + return this.textContent || format(d); + }); + + // Initialize the ticks with the old scale, x0. 
+ var tickEnter = tick.enter().append("g") + .attr("class", "tick") + .attr("transform", bulletTranslate(x0)) + .style("opacity", 1e-6); + + tickEnter.append("line") + .attr("y1", height) + .attr("y2", height * 7 / 6); + + tickEnter.append("text") + .attr("text-anchor", "middle") + .attr("dy", "1em") + .attr("y", height * 7 / 6) + .text(format); + + // Transition the entering ticks to the new scale, x1. + tickEnter.transition() + .duration(duration) + .attr("transform", bulletTranslate(x1)) + .style("opacity", 1); + + // Transition the updating ticks to the new scale, x1. + var tickUpdate = tick.transition() + .duration(duration) + .attr("transform", bulletTranslate(x1)) + .style("opacity", 1); + + tickUpdate.select("line") + .attr("y1", height) + .attr("y2", height * 7 / 6); + + tickUpdate.select("text") + .attr("y", height * 7 / 6); + + // Transition the exiting ticks to the new scale, x1. + tick.exit().transition() + .duration(duration) + .attr("transform", bulletTranslate(x1)) + .style("opacity", 1e-6) + .remove(); + }); + d3.timer.flush(); + } + + // left, right, top, bottom + bullet.orient = function(x) { + if (!arguments.length) return orient; + orient = x; + reverse = orient == "right" || orient == "bottom"; + return bullet; + }; + + // ranges (bad, satisfactory, good) + bullet.ranges = function(x) { + if (!arguments.length) return ranges; + ranges = x; + return bullet; + }; + + // markers (previous, goal) + bullet.markers = function(x) { + if (!arguments.length) return markers; + markers = x; + return bullet; + }; + + // measures (actual, forecast) + bullet.measures = function(x) { + if (!arguments.length) return measures; + measures = x; + return bullet; + }; + + bullet.width = function(x) { + if (!arguments.length) return width; + width = x; + return bullet; + }; + + bullet.height = function(x) { + if (!arguments.length) return height; + height = x; + return bullet; + }; + + bullet.tickFormat = function(x) { + if (!arguments.length) return tickFormat; + tickFormat = x; + return bullet; + }; + + bullet.duration = function(x) { + if (!arguments.length) return duration; + duration = x; + return bullet; + }; + + return bullet; +}; + +function bulletRanges(d) { + return d.ranges; +} + +function bulletMarkers(d) { + return d.markers; +} + +function bulletMeasures(d) { + return d.measures; +} + +function bulletTranslate(x) { + return function(d) { + return "translate(" + x(d) + ",0)"; + }; +} + +function bulletWidth(x) { + var x0 = x(0); + return function(d) { + return Math.abs(x(d) - x0); + }; +} + +})(); \ No newline at end of file diff --git a/src/main/webapp/static/d3js/bullet_charts.html b/src/main/webapp/static/d3js/bullet_charts.html new file mode 100644 index 00000000..f4e27471 --- /dev/null +++ b/src/main/webapp/static/d3js/bullet_charts.html @@ -0,0 +1,122 @@ + + + + + Bullet chart + + + + + + + + + + + + + diff --git a/src/main/webapp/static/d3js/bullets.json b/src/main/webapp/static/d3js/bullets.json new file mode 100644 index 00000000..b0b43ba7 --- /dev/null +++ b/src/main/webapp/static/d3js/bullets.json @@ -0,0 +1,7 @@ +[ + {"title":"Revenue","subtitle":"US$, in thousands","ranges":[150,225,300],"measures":[220,270],"markers":[250]}, + {"title":"Profit","subtitle":"%","ranges":[20,25,30],"measures":[21,23],"markers":[26]}, + {"title":"Order Size","subtitle":"US$, average","ranges":[350,500,600],"measures":[100,320],"markers":[550]}, + {"title":"New Customers","subtitle":"count","ranges":[1400,2000,2500],"measures":[1000,1650],"markers":[2100]}, + 
{"title":"Satisfaction","subtitle":"out of 5","ranges":[3.5,4.25,5],"measures":[3.2,4.7],"markers":[4.4]} +] \ No newline at end of file diff --git a/src/main/webapp/static/d3js/index_babel_d3js.2.html b/src/main/webapp/static/d3js/index_babel_d3js.2.html new file mode 100644 index 00000000..05ab8fdc --- /dev/null +++ b/src/main/webapp/static/d3js/index_babel_d3js.2.html @@ -0,0 +1,62 @@ + + + + + +
+ + + + + + diff --git a/src/main/webapp/static/d3js/index_babel_d3js.html b/src/main/webapp/static/d3js/index_babel_d3js.html new file mode 100644 index 00000000..37292761 --- /dev/null +++ b/src/main/webapp/static/d3js/index_babel_d3js.html @@ -0,0 +1,108 @@ + + + + + +
+ + + + + diff --git a/src/main/webapp/static/d3js/index_path_generator_d3js.3.html b/src/main/webapp/static/d3js/index_path_generator_d3js.3.html new file mode 100644 index 00000000..98230cba --- /dev/null +++ b/src/main/webapp/static/d3js/index_path_generator_d3js.3.html @@ -0,0 +1,103 @@ + + + + + +
+ + + + + + diff --git a/src/main/webapp/static/d3js/index_transform_d3js.4.html b/src/main/webapp/static/d3js/index_transform_d3js.4.html new file mode 100644 index 00000000..023ce7c0 --- /dev/null +++ b/src/main/webapp/static/d3js/index_transform_d3js.4.html @@ -0,0 +1,85 @@ + + + + + +
+ + + + + + diff --git a/src/test/java/ground/learning/java/coffee/CoffeeToJsTest.java b/src/test/java/ground/learning/java/coffee/CoffeeToJsTest.java deleted file mode 100644 index 3ec2bf07..00000000 --- a/src/test/java/ground/learning/java/coffee/CoffeeToJsTest.java +++ /dev/null @@ -1,38 +0,0 @@ -package ground.learning.java.coffee; - - -import ground.learning.java.coffee.CoffeeToJs; - -import javax.script.ScriptException; - -import org.hamcrest.CoreMatchers; -import org.junit.Assert; -import org.junit.Test; - -public class CoffeeToJsTest { - static CoffeeToJs coffeeToJs = new CoffeeToJs(); - - @Test - public void empty() throws ScriptException { - String js = new CoffeeToJs().toJs(""); - Assert.assertThat(js, CoreMatchers.equalTo("\n")); - } - - @Test - public void to_javascript() throws ScriptException { - String js = coffeeToJs.toJs("life=42"); - - Assert.assertThat(js, CoreMatchers.equalTo("var life;\n\nlife = 42;\n")); - } - - @Test - public void exec_javascript() throws ScriptException { - String js = coffeeToJs.toJs(coffeeToJs.readScript("/js/testCoffee.js")); - coffeeToJs.executeJs(js); - - //Check console ouput for "Hello, CoffeeScript World!" - } - - - -} \ No newline at end of file diff --git a/src/test/java/oms/OrderProcessorTest.java b/src/test/java/oms/OrderProcessorTest.java new file mode 100644 index 00000000..49019d68 --- /dev/null +++ b/src/test/java/oms/OrderProcessorTest.java @@ -0,0 +1,53 @@ +package oms; + +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import java.util.Optional; + +public class OrderProcessorTest { + + @Before + public void clear() { + OrderProcessor.getInstance().clear(); + } + + @Test + public void singleOrder() { + Order order = Order.create(Optional.empty()); + OrderProcessor.getInstance().processOrder(order); + Assert.assertTrue(OrderProcessor.getInstance().bestOrder(OrderType.SELL).equals(order)); + } + + @Test + public void bestOfTwoSellOrders() { + Order modelOrder = Order.create(Optional.empty()); + Order orderSecond = Order.create(Optional.empty()); + modelOrder.setPrice(10); + orderSecond.setPrice(modelOrder.getPrice() - 1); + + OrderProcessor.getInstance().processOrder(modelOrder); + OrderProcessor.getInstance().processOrder(orderSecond); + + Assert.assertEquals(OrderProcessor.getInstance().traderBlotterSize(), 2); + Assert.assertEquals(OrderProcessor.getInstance().bestOrder(OrderType.SELL), orderSecond); + Assert.assertEquals(OrderProcessor.getInstance().bestOrder(OrderType.SELL).getPrice(), orderSecond.getPrice()); + } + + @Test + public void bestOfTwoBuyOrders() { + Order modelOrder = Order.create(Optional.empty()); + modelOrder.setType(OrderType.BUY); + modelOrder.setPrice(10); + + Order orderSecond = Order.create(Optional.of(modelOrder)); + orderSecond.setPrice(modelOrder.getPrice() + 10); + + OrderProcessor.getInstance().processOrder(modelOrder); + OrderProcessor.getInstance().processOrder(orderSecond); + + Assert.assertEquals(OrderProcessor.getInstance().traderBlotterSize(), 2); + Assert.assertEquals(OrderProcessor.getInstance().bestOrder(OrderType.BUY), orderSecond); + Assert.assertEquals(OrderProcessor.getInstance().bestOrder(OrderType.BUY).getPrice(), orderSecond.getPrice()); + } +} diff --git a/tempCodeRunnerFile.py b/tempCodeRunnerFile.py new file mode 100644 index 00000000..11f7b890 --- /dev/null +++ b/tempCodeRunnerFile.py @@ -0,0 +1 @@ +[print(str(2) + "*" + str(x) + "= " + str(2*x)) for x in range(1,17) ] \ No newline at end of file
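The OrderProcessorTest added above pins down the intended selection rule: the best SELL order is the lowest-priced one and the best BUY order is the highest-priced one. The following is a minimal Scala sketch of just that rule; the Order/OrderType/bestOrder names below are illustrative stand-ins, not the repository's actual oms classes.

object BestOrderSketch {
  sealed trait OrderType
  case object BUY extends OrderType
  case object SELL extends OrderType

  final case class Order(orderType: OrderType, price: Int)

  // Best sell = cheapest offer, best buy = highest bid, mirroring the assertions in
  // bestOfTwoSellOrders and bestOfTwoBuyOrders.
  def bestOrder(blotter: Seq[Order], side: OrderType): Option[Order] = {
    val candidates = blotter.filter(_.orderType == side)
    if (candidates.isEmpty) None
    else Some(side match {
      case SELL => candidates.minBy(_.price)
      case BUY  => candidates.maxBy(_.price)
    })
  }

  def main(args: Array[String]): Unit = {
    val blotter = Seq(Order(SELL, 10), Order(SELL, 9), Order(BUY, 10), Order(BUY, 20))
    println(bestOrder(blotter, SELL)) // Some(Order(SELL,9))
    println(bestOrder(blotter, BUY))  // Some(Order(BUY,20))
  }
}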