diff --git "a/Spark Streaming \346\272\220\347\240\201\350\247\243\346\236\220\347\263\273\345\210\227/2.1 JobScheduler, Job, JobSet \350\257\246\350\247\243.md" "b/Spark Streaming \346\272\220\347\240\201\350\247\243\346\236\220\347\263\273\345\210\227/2.1 JobScheduler, Job, JobSet \350\257\246\350\247\243.md"
index b4af47d..d920146 100644
--- "a/Spark Streaming \346\272\220\347\240\201\350\247\243\346\236\220\347\263\273\345\210\227/2.1 JobScheduler, Job, JobSet \350\257\246\350\247\243.md"
+++ "b/Spark Streaming \346\272\220\347\240\201\350\247\243\346\236\220\347\263\273\345\210\227/2.1 JobScheduler, Job, JobSet \350\257\246\350\247\243.md"
@@ -129,13 +129,13 @@ class JobScheduler(val ssc: StreamingContext) extends Logging {
 // 完整代码可见本文最后的附录
 val BLOCK_INTERVAL = 1 // in seconds
 val BATCH_INTERVAL = 5 // in seconds
-val CURRENT_JOBS = 10 // in seconds
+val CURRENT_JOBS = 10
 ...
 
 // DStream DAG 定义开始
 val inputStream = ssc.receiverStream(...)
-inputStream.foreachRDD(_ => Thread.sleep(Int.MaxValue)) // output 1
-inputStream.foreachRDD(_ => Thread.sleep(Int.MaxValue)) // output 2
+inputStream.foreachRDD(_ => Thread.sleep(Int.MaxValue)) // output 1
+inputStream.foreachRDD(_ => Thread.sleep(Int.MaxValue)) // output 2
 // DStream DAG 定义结束
 ...
 ```