scala 可以使用java的庫
scala 的工廠方法:apply
條件表達式有返回值
數(shù)組可以用to ,箭頭 <-
最后一行內(nèi)容的值是整個代碼塊的返回值
def函數(shù) 定義函數(shù),調(diào)用不按順序
函數(shù):def 函數(shù)名,參數(shù)名:類型名,可以設(shè)定默認值,后可跟=號,如def f1 ( param1:String, param2: Int = 30) = param1 + param2
有默認值的參數(shù)調(diào)用時可以不加參數(shù),另外調(diào)用時如果指定參數(shù)名的話可以不考慮參數(shù)順序。
參數(shù)數(shù)量可變:def sum(numbers:Int*) *表示變長 調(diào)用時不可直接寫 sum(0 to 100),因為 0 to 100 是 Range 類型,而參數(shù)要求是多個 Int 值,但是可以寫成 sum(0 to 100: _*),: _* 表示把序列展開成多個值傳入
過程:無返回值的函數(shù),即返回值類型為 Unit 的函數(shù)。寫法有兩種:在參數(shù)列表之后顯式加上 :Unit = ,或者省略 = 號,直接用花括號寫函數(shù)體(省略 = 時返回值自動為 Unit)
lazy 類型:lazy val 在第一次被使用(訪問)時才計算,而不是在定義時計算
異常
try { val content = fromFile("/usr/local/spark/sfijweoijgr/")}catch{ case _: FileNotFoundException => println("Ooooops!!! File not found")} finally { println("Byebye world!")}
集合
數(shù)組:val arr = new Array[Int](5) (定長數(shù)組,需指定元素類型和長度)
ArrayBuffer的insert,remove方法
scala> val arr1 = Array("Scala","Spark")arr1: Array[String] = Array(Scala, Spark)scala> val arr1 = Array.apply("Scala","Spark")arr1: Array[String] = Array(Scala, Spark)scala> Arrayres3: Array.type = scala.Array$@54d18072scala> arr1(2) = "Hadoop"java.lang.ArrayIndexOutOfBoundsException: 2 ... 33 elidedscala> val arrbuf = ArrayBuffer[Int]()<console>:7: error: not found: value ArrayBuffer val arrbuf = ArrayBuffer[Int]() ^scala> import scala.collection.mutable.AAVLIterator AbstractIterable AbstractSet ArrayBuilder ArraySeq AVLTree AbstractMap AnyRefMap ArrayLike ArrayStack AbstractBuffer AbstractSeq ArrayBuffer ArrayOps scala> import scala.collection.mutable.ArrayArrayBuffer ArrayBuilder ArrayLike ArrayOps ArraySeq ArrayStackscala> import scala.collection.mutable.ArrayBuArrayBuffer ArrayBuilderscala> import scala.collection.mutable.ArrayBufferimport scala.collection.mutable.ArrayBufferscala> val arrbuf = ArrayBuffer[Int]()arrbuf: scala.collection.mutable.ArrayBuffer[Int] = ArrayBuffer()scala> arrbuf += 10res5: arrbuf.type = ArrayBuffer(10)scala> arrbufres6: scala.collection.mutable.ArrayBuffer[Int] = ArrayBuffer(10)scala> arrbuf(1)java.lang.IndexOutOfBoundsException: 1 at scala.collection.mutable.ResizableArray$class.apply(ResizableArray.scala:43) at scala.collection.mutable.ArrayBuffer.apply(ArrayBuffer.scala:48) ... 
33 elidedscala> arrbuf += ( 12,23,35,56)res8: arrbuf.type = ArrayBuffer(10, 12, 23, 35, 56)scala> arrbufres9: scala.collection.mutable.ArrayBuffer[Int] = ArrayBuffer(10, 12, 23, 35, 56)scala> arrbuf ++= Array(1,2,3,4)<console>:1: error: illegal character '\uff08' arrbuf ++= Array(1,,2,3,4) ^<console>:1: error: illegal character '\uff0c' arrbuf ++= Array(1,,2,3,4) ^scala> arrbuf ++= Array(1,2,3,4)res10: arrbuf.type = ArrayBuffer(10, 12, 23, 35, 56, 1, 2, 3, 4)scala> arrbufres11: scala.collection.mutable.ArrayBuffer[Int] = ArrayBuffer(10, 12, 23, 35, 56, 1, 2, 3, 4)scala> arrbuf.ttail takeWhile toIndexedSeq toMap toString transpose tails to toIterable toSeq toTraversable trimEnd take toArray toIterator toSet toVector trimStart takeRight toBuffer toList toStream transform scala> arrbuf.ttail takeWhile toIndexedSeq toMap toString transpose tails to toIterable toSeq toTraversable trimEnd take toArray toIterator toSet toVector trimStart takeRight toBuffer toList toStream transform scala> arrbuf.trimtrimEnd trimStartscala> arrbuf.trimtrimEnd trimStartscala> arrbuf.trimEnd(3)scala> arrbufres13: scala.collection.mutable.ArrayBuffer[Int] = ArrayBuffer(10, 12, 23, 35, 56, 1)scala> arrbufres14: scala.collection.mutable.ArrayBuffer[Int] = ArrayBuffer(10, 12, 23, 35, 56, 1)scala> arrbuf.inindexOf indexWhere init insert intersect indexOfSlice indices inits insertAll scala> arrbuf.insert(4,100)scala> arrbufres16: scala.collection.mutable.ArrayBuffer[Int] = ArrayBuffer(10, 12, 23, 35, 100, 56, 1)scala> arrbuf.insert(6,7,8,9)scala> arrbufres18: scala.collection.mutable.ArrayBuffer[Int] = ArrayBuffer(10, 12, 23, 35, 100, 56, 7, 8, 9, 1)scala> arrbuf.remove(0)res19: Int = 10scala> arrbufres20: scala.collection.mutable.ArrayBuffer[Int] = ArrayBuffer(12, 23, 35, 100, 56, 7, 8, 9, 1)scala> arrbuf.remove(1,2)scala> arrbufres22: scala.collection.mutable.ArrayBuffer[Int] = ArrayBuffer(12, 100, 56, 7, 8, 9, 1)scala> val arr2 = arrbuf.toArrayarr2: Array[Int] = Array(12, 100, 56, 7, 8, 9, 
1)scala> arr2.toBufferres23: scala.collection.mutable.Buffer[Int] = ArrayBuffer(12, 100, 56, 7, 8, 9, 1)scala> arr2.toString def toString(): Stringscala> arr2.toBufferres24: scala.collection.mutable.Buffer[Int] = ArrayBuffer(12, 100, 56, 7, 8, 9, 1)scala> for ( elem <- arr2) { elem}scala> for ( elem <- arr2) {println(elem)}12100567891scala> for( i <- 1 until (arr2.length,1)) println(arr2(i))100567891scala> for( i <- 1 until (arr2.length,2)) println(arr2(i))10079scala> for( i <- 0 until (arr2.length,2)) println(arr2(i))125681scala> for( i <- 0 until (arr2.length,2)) println(arr2(i))125681scala> arr2res31: Array[Int] = Array(12, 100, 56, 7, 8, 9, 1)scala> for(i <- (0 until arr2.length).reverse) println(arr2(i))19875610012scala> import scala.util.Sorting._import scala.util.Sorting._scala> quickSort(arr2)scala> arr2res34: Array[Int] = Array(1, 7, 8, 9, 12, 56, 100)scala> val arr3 = for(i <- arr2) yield i*iarr3: Array[Int] = Array(1, 49, 64, 81, 144, 3136, 10000)scala> val arr4 = for(i <- arr2 if i%3 == 0) yield i*iarr4: Array[Int] = Array(81, 144)scala> arr2.filter(_%3 ==0).map(i => i*i)res35: Array[Int] = Array(81, 144)scala> arr2.filter{_%3 ==0}.map{i => i*i}res36: Array[Int] = Array(81, 144)scala> arr2.filter{_%3 ==0}map{i => i*i}res3: Array[Int] = Array(144, 81)
yield 把后面的每一個元素收集起來并組拼成一個集合
作業(yè):刪掉數(shù)組中第一個負數(shù)后面的所有負數(shù)
scala> val person = Map("Spark" ->6, "Hadoop" -> 11)person: scala.collection.immutable.Map[String,Int] = Map(Spark -> 6, Hadoop -> 11)scala> person("Hadoop")res4: Int = 11scala> val person = scala.collection.mutable.Map("Spark" ->6, "Hadoop" -> 11)person: scala.collection.mutable.Map[String,Int] = Map(Hadoop -> 11, Spark -> 6)scala> person += ("Flink" -> 5)res5: person.type = Map(Hadoop -> 11, Spark -> 6, Flink -> 5)scala> person -= "Flink"res6: person.type = Map(Hadoop -> 11, Spark -> 6)scala> val sparkValue = if (person.contains("Spark")) person("Spark") else 1000sparkValue: Int = 6scala> val sparkValue = person.getOrElsegetOrElse getOrElseUpdatescala> val sparkValue = person.getOrElse("Spark", 1000)sparkValue: Int = 6scala> val sparkValue = person.getOrElse("Flink", 1000)sparkValue: Int = 1000scala> for((key,value) <- person) println(key+":"+value)Hadoop:11Spark:6scala> for((key,value) <- person) println(key+":")Hadoop:Spark:scala> val person = scala.collection.mutable.S.Map("Spark" ->6, "Hadoop" -> 11)Seq SetProxy Subscriber SynchronizedSet SeqLike SortedSet SynchronizedBuffer SynchronizedStack Set Stack SynchronizedMap SetBuilder StackProxy SynchronizedPriorityQueue SetLike StringBuilder SynchronizedQueue scala> val person = scala.collection.immutable.S.Map("Spark" ->6, "Hadoop" -> 11)Seq SetProxy SortedSet Stream StreamView StringLike Set SortedMap Stack StreamIterator StreamViewLike StringOps scala> val person = scala.collection.immutable.SortedMap("Spark" ->6, "Hadoop" -> 11)person: scala.collection.immutable.SortedMap[String,Int] = Map(Hadoop -> 11, Spark -> 6)TUPLE:scala> val tuple = ("Spark", 6, 99.0)tuple: (String, Int, Double) = (Spark,6,99.0)scala> tuple._1res9: String = Sparkscala> tuple._2res10: Int = 6scala> tuple._3res11: Double = 99.0