- Create a GKE cluster with a GPU node pool:
gcloud container clusters create gpu-sharing-demo --zone us-central1-c

# Gemfile: pin the Selenium WebDriver client
source 'https://rubygems.org'

gem 'selenium-webdriver', '~> 4.23.0'
<!doctype html>
<html>
  <head>
    <meta charset="UTF-8" />
    <meta
      http-equiv="origin-trial"
      content="Aq6vv/4syIkcyMszFgCc9LlH0kX88jdE7SXfCFnh2RQN0nhhL8o6PCQ2oE3a7n3mC7+d9n89Repw5HYBtjarDw4AAAB3eyJvcmlnaW4iOiJodHRwczovL3B5b2RpZGUub3JnOjQ0MyIsImZlYXR1cmUiOiJXZWJBc3NlbWJseUpTUHJvbWlzZUludGVncmF0aW9uIiwiZXhwaXJ5IjoxNzMwMjQ2Mzk5LCJpc1N1YmRvbWFpbiI6dHJ1ZX0="
    />
| <meta |
#!/bin/bash
# Find PRs that were closed WITHOUT being merged during a one-hour window,
# and collect them into misclosed.json as a valid JSON array.
set -euo pipefail

# `gh ... -q` emits one JSON object per line for each matching PR.
gh pr list -L 57000 -s closed --json number,closedAt,mergedAt,updatedAt,url \
  -q '.[] | select(.mergedAt == null and .updatedAt >= "2024-02-03T05:00:00Z" and .updatedAt <= "2024-02-03T06:00:00Z")' \
  > misclosed.txt

# Slurp the newline-delimited objects into a single JSON array.
# (Replaces the fragile echo/awk bracket-and-comma assembly; jq -s handles
# the empty-input case correctly, producing "[]".)
jq -s '.' misclosed.txt > misclosed.json
| jq '.[].number' misclosed.json | while read number; do |
| #!/bin/bash | |
| # This list was generated by: | |
| # git branch -a | grep remote | awk '{$1=$1};1' | gxargs -d'\n' sh -c 'for arg do echo "`git rev-list --count HEAD ^$arg` $arg"; done' | sort -r -k1 -n | |
| for i in cpirich/honor-data-tables-from-project-template-level ha/refactor-survey-pipeline maker-webusb ha/show-ayw-result cpirich/expose-export-app-as-beta ha/fix-ta-eyes-test-2 ha/fix-ta-eyes-test ha/sp-test-parser ha/sp-test-e2e ha/cr-prototype max/LP-1021reusdedropdown max/LP-1021 max/dronebeforecr/LP-1021 max/drone/LP-1021 max/LP-1358 ha/cr-query-timeout ha/cr-raw-aggregated revert-35855-molly/move-render-test cforkish/assign-to-google-classroom max/LP-1481 fixprogresscrolling/LP-1481 fixgrid2/LP-1481 fixgrid/LP-1481 cforkish/LP-1518-test ha/oceans-cache cforkish/progress-data-refactor cforkish/LP-1649-move-best-result-to-backend cforkish/why-did-you-render ha/analyze-i18n-pr ha/oceans-offline-december dependabot/npm_and_yarn/aws/offsite/prune-aurora-backups/src/lodash-4.17.20 dependabot/npm_and_yarn/aws/cloudfo |
#!/bin/bash
# Mirror-clone the code-dot-org repository (e.g. in preparation for a
# `git lfs migrate` history rewrite).
set -euo pipefail
set -x

readonly REPO=git@github.com:code-dot-org/code-dot-org.git

# We use a --mirror clone because non-mirror clones were not having
# all remote refspecs updated by `git lfs migrate`, which meant
# we weren't successfully removing the giant old tree with blobs.
git clone --mirror "$REPO"
#!/bin/bash
set -xe

############################################################
# First, installs equivs, which lets us build an empty package
# fulfilling the libappindicator1 dependency, allowing Signal
# for Linux to be installed on Debian buster.
############################################################
# Use apt-get (stable CLI for scripts) with -y so the install
# is non-interactive.
sudo apt-get install -y equivs
#!/bin/bash
# Install/refresh the Homebrew dependencies needed to build mapnik 4 from source.
# note: boost needs to be >= 1.73; make sure the pinned python formula is linked
# too. NOTE(review): "[email protected]" below looks like an email-redaction
# artifact of a formula name such as "python@3.x" — confirm the real formula
# before running.
brew install boost boost-python3 sqlite gdal cairo [email protected]
brew upgrade boost boost-python3 sqlite gdal cairo [email protected]
# Ensure boost is linked into the brew prefix so the mapnik build can find it.
brew link boost
# We're building a custom mapnik 4, so uninstall the stale 3.x version in homebrew:
brew uninstall mapnik
| # Use Google Cloud Platform stackdriver with python structlog | |
| from google.cloud.logging import Client | |
| from google.cloud.logging import _helpers | |
| from google.cloud.logging.handlers import CloudLoggingHandler | |
| from google.cloud.logging.handlers.transports.background_thread import _Worker | |
| # pip install python-json-logger | |
| from pythonjsonlogger import jsonlogger |
| from pathlib import Path | |
| import re | |
| import requests | |
| from urllib.parse import urljoin | |
| import ipykernel | |
| from notebook.notebookapp import list_running_servers | |
| def files_url_for(kernel_path): | |
| _server = server() |