Array homework:
val a = Array(-2, -1, 0, 2, 1, -1)
var bool: Boolean = true          // stays true until the first negative element has been printed
for (elem <- a) {
  if (bool) {
    if (elem < 0) {               // print only the first negative element
      println(elem)
      bool = false
    }
  }
  if (elem >= 0)                  // print every non-negative element
    println(elem)
}
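For comparison, a sketch of the same task without the mutable flag, using find for the first negative element and filter for the non-negative ones (note this prints the first negative before the non-negatives, while the loop above prints in array order):

import scala.Predef._

val a = Array(-2, -1, 0, 2, 1, -1)
a.find(_ < 0).foreach(println)       // the first negative element, if there is one
a.filter(_ >= 0).foreach(println)    // every non-negative element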
Class notes:
Today's topics:
Code-related:
Function structure example
{
  val n = 10
  def f1: Any = {
    for (i <- 1 to 10) {
      if (i == n) return i   // leaves f1 immediately, returning i (here 10)
      println(i)             // prints 1 through 9 before the return fires
    }
  }
}
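A short sketch of why the result type has to be Any: when the return fires the method yields an Int, otherwise it falls through to the Unit value of the for loop. Adapted (as an assumption) to take n as a parameter so both cases can be shown:

def f(n: Int): Any = {
  for (i <- 1 to 10) {
    if (i == n) return i   // Int result when n is within 1..10
    println(i)
  }
}                          // otherwise the for loop's value, Unit, is returned

println(f(10))   // prints 1..9, then 10 (the returned Int)
println(f(99))   // prints 1..10, then () (the Unit value)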
Text input/output example
{
  readLine                        // read one line from standard input
  readLine("password:")           // print a prompt first, then read a line
  println("Spark")                // print with a trailing newline
  printf("%s:%d\n", "Spark", 6)   // printf takes a format string plus arguments
}
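A minimal runnable sketch tying these together; the prompt text and format string are illustrative, not from the notes. In Scala 2.11+ readLine lives in scala.io.StdIn rather than Predef:

import scala.io.StdIn

object IOExample extends App {
  val name = StdIn.readLine("name: ")                       // prompt, then read a line
  println("Spark")                                          // println appends a newline
  printf("hello, %s! you typed %d chars\n", name, name.length)
}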
File reading example
{
  import scala.io.Source.fromFile
  import java.io.FileNotFoundException

  try {
    // lazy defers evaluation: fromFile/mkString only run (and can only throw) when content is first used
    lazy val content = fromFile("/usr/local/spark/spark-1.5.0-bin-hadoop1/README.md").mkString
    println(content.length)   // touch content inside the try so a missing file is actually caught below
  }
  catch {
    case _: FileNotFoundException => println("Oops!!!File not found")
  }
  finally {
    println("Byebye world!!!")
  }
}
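A variant sketch (not from the notes) that also closes the file handle in finally, since the version above never closes the Source it opens; the path is the same one from the notes:

import scala.io.Source
import java.io.FileNotFoundException

var source: Source = null
try {
  source = Source.fromFile("/usr/local/spark/spark-1.5.0-bin-hadoop1/README.md")
  println(source.mkString.length)                           // read the whole file
} catch {
  case _: FileNotFoundException => println("Oops!!!File not found")
} finally {
  if (source != null) source.close()                        // release the handle whether or not reading succeeded
  println("Byebye world!!!")
}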
Array-related:
{
  val arr = new Array[Int](5)             // (0,0,0,0,0): fixed length; elements can still be updated
  ArrayBuffer: a variable-length array (elements can be added)
  import scala.collection.mutable._       // the mutable collections library, which provides ArrayBuffer
  Constructing a mutable array:
  val arrBuffer = ArrayBuffer[Int]()
  Insert:
  arrBuffer += 10                         // append one element
  arrBuffer += (1, 55, 233, 514, 1, 56)   // append several elements
  arrBuffer ++= Array(1, 2, 3, 4)         // append a whole array
  Delete:
  arrBuffer.trimEnd(3)                    // drop the last 3 elements
  arrBuffer.remove(10)                    // remove the element at index 10
  mkString:
  val arr2 = ArrayBuffer(231, 54, 121, 5, 2, 4, 2, 154, 454)   // inferred from the REPL output below
  arr2.mkString
  res5: String = 231541215242154454
  arr2.mkString(",")
  res6: String = 231,54,121,5,2,4,2,154,454
}
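A self-contained sketch running the ArrayBuffer operations above in order; the element values are the ones from the notes, and remove(2) is used because after trimEnd(3) the buffer no longer has an index 10:

import scala.collection.mutable.ArrayBuffer

object ArrayBufferDemo extends App {
  val arrBuffer = ArrayBuffer[Int]()
  arrBuffer += 10                         // ArrayBuffer(10)
  arrBuffer += (1, 55, 233, 514, 1, 56)   // ArrayBuffer(10, 1, 55, 233, 514, 1, 56)
  arrBuffer ++= Array(1, 2, 3, 4)         // ArrayBuffer(10, 1, 55, 233, 514, 1, 56, 1, 2, 3, 4)
  arrBuffer.trimEnd(3)                    // ArrayBuffer(10, 1, 55, 233, 514, 1, 56, 1)
  arrBuffer.remove(2)                     // removes 55 (the element at index 2)
  println(arrBuffer.mkString(","))        // 10,1,233,514,1,56,1
}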
filter-related:
{
  arr2.filter(_ % 3 == 0).map(i => i * i)   // ArrayBuffer(53361, 2916): keep multiples of 3, then square them
}
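The same filter/map chain can also be written as a for comprehension with a guard; a sketch using the arr2 values inferred above:

import scala.collection.mutable.ArrayBuffer

val arr2 = ArrayBuffer(231, 54, 121, 5, 2, 4, 2, 154, 454)

// filter then map ...
val squares1 = arr2.filter(_ % 3 == 0).map(i => i * i)      // ArrayBuffer(53361, 2916)

// ... is equivalent to a for comprehension with a guard
val squares2 = for (i <- arr2 if i % 3 == 0) yield i * i    // ArrayBuffer(53361, 2916)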
Map-related:
{
  val person = Map("Spark" -> 6, "Hadoop" -> 11)                            // immutable: entries cannot be added
  val person = scala.collection.mutable.Map("Spark" -> 6, "Hadoop" -> 11)   // mutable: entries can be added
  Insert:
  person += ("Flink" -> 5)
  Delete:
  person -= "Flink"
  Usage examples:
  val sparkValue = if (person.contains("Spark")) person("Spark") else 1000
  val sparkValue = person.getOrElse("Spark", 1000)                          // the same lookup-with-default in one call
  for ((key, value) <- person) println(key + ":" + value)                   // iterate over key/value pairs
  for (key <- person.keySet) println(key + ":")                             // iterate over keys only
}
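A self-contained sketch running the mutable Map operations above in order; the names and values are the ones from the notes:

import scala.collection.mutable

object MapDemo extends App {
  val person = mutable.Map("Spark" -> 6, "Hadoop" -> 11)

  person += ("Flink" -> 5)                 // now Spark -> 6, Hadoop -> 11, Flink -> 5
  person -= "Flink"                        // back to Spark -> 6, Hadoop -> 11

  val sparkValue = person.getOrElse("Spark", 1000)
  println(sparkValue)                      // 6

  val flinkValue = person.getOrElse("Flink", 1000)
  println(flinkValue)                      // 1000, since "Flink" was removed

  for ((key, value) <- person)             // e.g. Spark:6 and Hadoop:11 (mutable Map order is not guaranteed)
    println(key + ":" + value)
}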