Skip to content

Commit

Permalink
Merge branch 'passthroughargs-docs' of https://github.com/svotaw/SynapseML into passthroughargs-docs

Browse files Browse the repository at this point in the history
  • Loading branch information
svotaw committed Nov 30, 2022
2 parents 1918ae6 + 8f3749f commit cd1d405
Show file tree
Hide file tree
Showing 35 changed files with 1,269 additions and 2,759 deletions.
2 changes: 1 addition & 1 deletion .github/ISSUE_TEMPLATE/bug_report_template.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ body:
Describe the system where you encountered the bug.
value: |
- **Language version** (e.g. python 3.8, scala 2.12):
- **Spark Version** (e.g. 3.2.2):
- **Spark Version** (e.g. 3.2.3):
- **Spark Platform** (e.g. Synapse, Databricks):
validations:
required: true
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ SynapseML requires Scala 2.12, Spark 3.2+, and Python 3.6+.
| Version | [![Version](https://img.shields.io/badge/version-0.10.2-blue)](https://github.com/Microsoft/SynapseML/releases) [![Release Notes](https://img.shields.io/badge/release-notes-blue)](https://github.com/Microsoft/SynapseML/releases) [![Snapshot Version](https://mmlspark.blob.core.windows.net/icons/badges/master_version3.svg)](#sbt) |
| Docs | [![Scala Docs](https://img.shields.io/static/v1?label=api%20docs&message=scala&color=blue&logo=scala)](https://mmlspark.blob.core.windows.net/docs/0.10.2/scala/index.html#package) [![PySpark Docs](https://img.shields.io/static/v1?label=api%20docs&message=python&color=blue&logo=python)](https://mmlspark.blob.core.windows.net/docs/0.10.2/pyspark/index.html) [![Academic Paper](https://img.shields.io/badge/academic-paper-7fdcf7)](https://arxiv.org/abs/1810.08744) |
| Support | [![Gitter](https://badges.gitter.im/Microsoft/MMLSpark.svg)](https://gitter.im/Microsoft/MMLSpark?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) [![Mail](https://img.shields.io/badge/mail-synapseml--support-brightgreen)](mailto:[email protected]) |
| Binder | [![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/microsoft/SynapseML/93d7ccf?labpath=notebooks%2Ffeatures) |
| Binder | [![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/microsoft/SynapseML/v0.10.2?labpath=notebooks%2Ffeatures) |

<summary><strong><em>Table of Contents</em></strong></summary>

Expand Down
3 changes: 1 addition & 2 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ import scala.xml.transform.{RewriteRule, RuleTransformer}
import scala.xml.{Node => XmlNode, NodeSeq => XmlNodeSeq, _}

val condaEnvName = "synapseml"
val sparkVersion = "3.2.2"
val sparkVersion = "3.2.3"
name := "synapseml"
ThisBuild / organization := "com.microsoft.azure"
ThisBuild / scalaVersion := "2.12.15"
Expand Down Expand Up @@ -402,7 +402,6 @@ lazy val deepLearning = (project in file("deep-learning"))
.settings(settings ++ Seq(
libraryDependencies ++= Seq(
"com.microsoft.azure" % "onnx-protobuf_2.12" % "0.9.1" classifier "assembly",
"com.microsoft.cntk" % "cntk" % "2.4",
"com.microsoft.onnxruntime" % "onnxruntime_gpu" % "1.8.1"
),
name := "synapseml-deep-learning"
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ import org.apache.spark.sql.{DataFrame, Dataset, Row}
import java.awt.Color
import java.awt.color.ColorSpace
import java.awt.image.BufferedImage
import java.awt.{Image => JImage}
import scala.math.round

object UnrollImage extends DefaultParamsReadable[UnrollImage] {
Expand Down Expand Up @@ -132,14 +133,31 @@ object UnrollImage extends DefaultParamsReadable[UnrollImage] {
val biOpt = ImageUtils.safeRead(bytes)
biOpt.map { bi =>
(height, width) match {
case (Some(h), Some(w)) => unrollBI(ResizeUtils.resizeBufferedImage(w, h, nChannels)(bi))
case (Some(h), Some(w)) => unrollBI(resizeBufferedImage(w, h, nChannels)(bi))
case (None, None) => unrollBI(bi)
case _ =>
throw new IllegalArgumentException("Height and width must either both be specified or unspecified")
}
}
}

private[ml] def resizeBufferedImage(width: Int,
height: Int,
channels: Option[Int])(image: BufferedImage): BufferedImage = {
val imgType = channels.map(ImageUtils.channelsToType).getOrElse(image.getType)

if (image.getWidth == width && image.getHeight == height && image.getType == imgType) {
image
} else {
val resizedImage = image.getScaledInstance(width, height, JImage.SCALE_DEFAULT)
val bufferedImage = new BufferedImage(width, height, imgType)
val g = bufferedImage.createGraphics()
g.drawImage(resizedImage, 0, 0, null) //scalastyle:ignore null
g.dispose()
bufferedImage
}
}

}

/** Converts the representation of an m X n pixel image to an m * n vector of Doubles
Expand All @@ -148,7 +166,6 @@ object UnrollImage extends DefaultParamsReadable[UnrollImage] {
*
* @param uid The id of the module
*/
@deprecated("Please use 'OnnxModel'.", since="0.10.2")
class UnrollImage(val uid: String) extends Transformer
with HasInputCol with HasOutputCol with Wrappable with DefaultParamsWritable with BasicLogging {
logClass()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -52,10 +52,6 @@ trait BasicLogging extends Logging {
logVerb("transform", f)
}

def logPredict[T](f: => T): T = {
logVerb("predict", f)
}

def logVerb[T](verb: String, f: => T): T = {
logBase(verb)
try {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,7 @@ object RTestGen {
| "spark.sql.shuffle.partitions=10",
| "spark.sql.crossJoin.enabled=true")
|
|sc <- spark_connect(master = "local", version = "3.2.2", config = conf)
|sc <- spark_connect(master = "local", version = "3.2.3", config = conf)
|
|""".stripMargin, StandardOpenOption.CREATE)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -191,8 +191,7 @@ trait DotnetTestFuzzing[S <: PipelineStage] extends TestBase with DataFrameEqual
.split(".".toCharArray).map(capitalize).mkString(".")
val externalLoaderImports = conf.name match {
case "synapseml-deep-learning" =>
s"""using Synapse.ML.Cntk;
|using Synapse.ML.Onnx;
s"""using Synapse.ML.Onnx;
|using Synapse.ML.Stages;
|""".stripMargin
case _ => ""
Expand Down
157 changes: 0 additions & 157 deletions deep-learning/src/main/R/model_downloader.R

This file was deleted.

Loading

0 comments on commit cd1d405

Please sign in to comment.