I hereby claim:
- I am anabranch on github.
- I am billc (https://keybase.io/billc) on keybase.
- I have a public key whose fingerprint is 4766 503D C86D 17E1 0E45 D1AE 44C3 1679 6FBE AC9D
To claim this, I am signing this object:

import argparse
import time
import random
import math

from ray.util.multiprocessing import Pool

parser = argparse.ArgumentParser(description="Approximate digits of Pi using Monte Carlo simulation.")
parser.add_argument("--num-samples", type=int, default=1000000)
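
# (The gist breaks off after the argument parser; below is a minimal sketch
# of how the Monte Carlo estimate might continue. The sample helper and the
# ten-batch split are illustrative, not from the original.)
def sample(num_samples):
    # Count draws from the unit square that land inside the unit circle.
    num_inside = 0
    for _ in range(num_samples):
        x, y = random.uniform(-1, 1), random.uniform(-1, 1)
        if math.hypot(x, y) <= 1:
            num_inside += 1
    return num_inside

if __name__ == "__main__":
    args = parser.parse_args()
    # Pool() is Ray's drop-in replacement for multiprocessing.Pool, so the
    # batches execute as Ray tasks rather than local processes.
    pool = Pool()
    batches = [args.num_samples // 10] * 10
    total_inside = sum(pool.map(sample, batches))
    # Ratio of areas: circle / square = pi / 4.
    print("pi ~= {}".format(4 * total_inside / sum(batches)))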

# Driver script: imports the remote adder task and the timer helper
# defined in utils.py (the next snippet).
import ray

from utils import adder, timer

if __name__ == '__main__':
    ray.init(address='auto')  # connect to an already-running cluster
    # ray.init(num_cpus=2)    # alternative: start a local 2-CPU instance
    values = range(10)
    # Each call returns an object ref immediately; the work runs remotely.
    new_values = [adder.remote(x) for x in values]
    timer(new_values)
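
Since ray.init(address='auto') expects a cluster to already be running, bring one up locally first (or fall back to the commented-out in-process init):

$ ray start --head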

# utils.py: the helpers imported by the driver above.
import ray
import time
from datetime import datetime as dt

@ray.remote
def adder(x):
    return x + 1

def timer(values):
    start = dt.now()
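    # (The gist is cut off here; a plausible completion, assuming timer
    # is meant to block on the object refs and report wall-clock time.)
    results = ray.get(values)
    print(results)
    print("elapsed: {:.3f}s".format((dt.now() - start).total_seconds()))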

# A unique identifier for the head node and workers of this cluster.
cluster_name: basic-ray

# The maximum number of worker nodes to launch in addition to the head
# node. This takes precedence over min_workers. min_workers defaults to 0.
max_workers: 0  # this means zero workers

# Cloud-provider specific configuration.
provider:
    type: aws
    region: us-west-2
    availability_zone: us-west-2a
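
Saved to a file (the name basic-ray.yaml below is illustrative), this config drives the Ray cluster launcher:

$ ray up basic-ray.yaml      # provision the head node on AWS
$ ray attach basic-ray.yaml  # SSH into the head node
$ ray down basic-ray.yaml    # tear the cluster down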

import ray
import time
from datetime import datetime as dt

@ray.remote
def adder(input_value):
    # Sleep to simulate a second of real work per task.
    time.sleep(1)
    return input_value + 1

if __name__ == '__main__':
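    # (Truncated in the gist; a minimal driver in the spirit of the earlier
    # example. Because the ten 1-second tasks run in parallel, this should
    # finish in roughly one second given enough CPUs, not ten.)
    ray.init()
    start = dt.now()
    refs = [adder.remote(x) for x in range(10)]
    print(ray.get(refs))
    print("elapsed: {:.3f}s".format((dt.now() - start).total_seconds()))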

# Databricks notebook source
# MAGIC %md
# MAGIC
# MAGIC # RDDs

# COMMAND ----------

# Distribute the numbers 0..999 across 5 partitions.
rdd = sc.parallelize(range(1000), 5)
print(rdd.take(10))
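# COMMAND ----------

# (Not part of the original notebook: a plausible next cell, assuming it
# goes on to inspect the partitioning and apply a first transformation.)
print(rdd.getNumPartitions())
print(rdd.map(lambda x: x * 2).take(10))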

$ ./bin/spark-shell
Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties
Setting default log level to "WARN".
To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).
16/12/11 13:43:58 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
16/12/11 13:43:58 WARN Utils: Your hostname, bill-ubuntu resolves to a loopback address: 127.0.1.1; using 192.168.42.75 instead (on interface wlp2s0)
16/12/11 13:43:58 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address
Spark context Web UI available at http://192.168.42.75:4040
Spark context available as 'sc' (master = local[*], app id = local-1481492639112).
Spark session available as 'spark'.
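
The loopback warning above hints at the fix: pin the bind address before launching the shell (the address here is just the one reported in the log):

$ export SPARK_LOCAL_IP=192.168.42.75
$ ./bin/spark-shell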

from bs4 import BeautifulSoup
import re
import glob

def get_prod(soup):
    production_companies = []
    # Scan every table row for a header cell naming the production company;
    # the header text contains a literal line break in the scraped HTML.
    for row in soup.select("tr"):
        for th in row.select("th"):
            if th.text.strip() == "Production\ncompany":
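                # (The gist ends here; a plausible continuation, assuming the
                # company names sit in the matching <td> cell, one per line.)
                td = row.select_one("td")
                if td is not None:
                    for line in td.get_text("\n").split("\n"):
                        name = line.strip()
                        if name:
                            production_companies.append(name)
    return production_companies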