I hereby claim:
- I am rachtsingh on github.
- I am rachitsingh (https://keybase.io/rachitsingh) on keybase.
- I have a public key ASAtaQuPwR-0aYEy7PkgjheFZh-b4C2ZfWG7ssWrHRDi8Qo
To claim this, I am signing this object:
#!/usr/bin/env zsh
set -euo pipefail

# 0. Install uv (fast Python+venv manager) if missing
if ! command -v uv >/dev/null; then
  echo "→ Installing uv…"
  curl -LsSf https://astral.sh/uv/install.sh | sh
  export PATH="$HOME/.cargo/bin:$PATH"  # uv installer puts it here by default
fi
use clap::Parser;
use flate2::Compression as GzipCompression;
use flate2::write::GzEncoder;
use humansize::{BINARY, format_size};
use lz4::EncoderBuilder as Lz4EncoderBuilder;
use std::fs::File;
use std::io::{self, Read, Write};
use std::path::PathBuf;
use std::time::Instant;
use tempfile::tempdir;
| """ | |
| This script exports the AutoGPT-Q Llama 2 weights in llama2rs.bin format. | |
| """ | |
| import pathlib | |
| import click | |
| import struct | |
| import torch | |
| from torch import nn | |
| from auto_gptq import AutoGPTQForCausalLM | |
| from auto_gptq.nn_modules import qlinear |
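Only the docstring and imports of the export script are shown above. As a rough illustration of what writing a llama2rs.bin-style file with struct typically looks like, here is a minimal sketch; the header fields, their order, and the model attribute names (e.g. model.model.embed_tokens) are assumptions for illustration, not the gist's actual format.

import struct
import torch

def write_tensor(out, tensor: torch.Tensor) -> None:
    # write raw little-endian float32 values, row-major
    data = tensor.detach().to(torch.float32).contiguous().view(-1)
    out.write(struct.pack(f"<{data.numel()}f", *data.tolist()))

def export(model, config, path: str) -> None:
    with open(path, "wb") as out:
        # fixed-size integer header; the exact fields here are an assumption
        out.write(struct.pack(
            "<5i",
            config.hidden_size,
            config.num_hidden_layers,
            config.num_attention_heads,
            config.vocab_size,
            config.max_position_embeddings,
        ))
        # token embedding table first; per-layer weights would follow in a fixed order
        write_tensor(out, model.model.embed_tokens.weight)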
#!/usr/local/bin/python3
"""
This script should idempotently grab items newly saved to Pocket and append them to pocket.md.
You'll need to install https://airtable-python-wrapper.readthedocs.io, e.g. via:
    $ pip3 install airtable-python-wrapper
and, of course, sign up for an Airtable account (the free tier seems to work fine).
"""
#include <stdio.h>
#include <stdlib.h>

// void * return type means that it returns a naked pointer, which is just an address.
void *foo(void) {
    long *bar_ptr = malloc(sizeof(long));
    *bar_ptr = 5;
    printf("The data is held at address: %p, holds %zu bytes\n", (void *) bar_ptr, sizeof(long));
    free(bar_ptr);
    // after free() the allocation is gone, so the address we hand back is dangling:
    // the caller can print it, but must not dereference it.
    return (void *) bar_ptr;
}
#include <stdio.h>

// void * return type means that it returns a naked pointer, which is just an address.
void *foo(void) {
    long bar = 5;
    printf("bar's address is: %p, holds %zu bytes\n", (void *) &bar, sizeof(long));
    long *ret = &bar;
    return (void *) ret;  // ret is a pointer to a long, but we just turn it into a regular address
    // note that the previous line doesn't *do* anything, it just gets the compiler off our back
}
import numpy as np
from matplotlib import pyplot as plt

def sample_gamma(alpha):
    total = 0
    scale = 1.0
    if alpha < 1.0:
        scale *= (1 - np.random.uniform() ** (1.0 / alpha))
        total += 1
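The fragment above cuts off mid-function. For reference, a complete gamma rejection sampler in the same spirit is sketched below; this is the standard Marsaglia-Tsang (2000) construction with the usual U^(1/alpha) boost for alpha < 1, not necessarily the algorithm the gist goes on to implement.

import numpy as np

def sample_gamma_mt(alpha: float) -> float:
    # Marsaglia-Tsang rejection sampler for Gamma(alpha, 1).
    boost = 1.0
    if alpha < 1.0:
        # for alpha < 1, sample Gamma(alpha + 1) and scale by U^(1/alpha)
        boost = np.random.uniform() ** (1.0 / alpha)
        alpha += 1.0
    d = alpha - 1.0 / 3.0
    c = 1.0 / np.sqrt(9.0 * d)
    while True:
        x = np.random.normal()
        v = (1.0 + c * x) ** 3
        if v <= 0.0:
            continue  # proposal outside the support, reject immediately
        u = np.random.uniform()
        # full acceptance test (no squeeze step, for brevity)
        if np.log(u) < 0.5 * x * x + d - d * v + d * np.log(v):
            return boost * d * v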
| #include "ATen/NativeFunctions.h" | |
| #include "ATen/Dispatch.h" | |
| #include "ATen/cuda/CUDAApplyUtils.cuh" | |
| #include <curand.h> | |
| #include <curand_kernel.h> | |
| #include <curand_philox4x32_x.h> | |
| #include <utility> | |
| #include <functional> | |
| #include <nvfunctional> |
[01/30/17 13:57:47 INFO] Using GPU(s): 1
[01/30/17 13:57:47 INFO] Loading data from '../data/translate-train.t7'...
[01/30/17 13:57:51 INFO]  * vocabulary size: source = 50004; target = 50004
[01/30/17 13:57:51 INFO]  * additional features: source = 0; target = 0
[01/30/17 13:57:51 INFO]  * maximum sequence length: source = 50; target = 51
[01/30/17 13:57:51 INFO]  * number of training sentences: 100000
[01/30/17 13:57:51 INFO]  * maximum batch size: 64
[01/30/17 13:57:51 INFO] Building model...
[01/30/17 13:57:55 INFO]  * using input feeding
[01/30/17 13:57:56 INFO] Initializing parameters...