Skip to content

Instantly share code, notes, and snippets.

<template>
  <div>
    <label>User Query:</label>
    <!-- <input> is a void element: it must not have a closing </input> tag -->
    <input v-model="query" :disabled="loading" />
    <!-- NOTE(review): :busy is not a native <button> attribute — confirm a
         custom component/directive consumes it, or it may be meant as :disabled -->
    <button @click="getData({ query })" :busy="loading" />
    <!-- Render the result component once the fetch has settled either way -->
    <User v-if="data != null || error != null" :data="data" :error="error" />
  </div>
</template>
<script>
import { Ref, ref } from "vue";
// Composable wrapping an async `fetcher` with reactive data/loading/error state.
export function useFetcher(fetcher) {
const data = ref(null);
const loading = ref(false);
const error = ref(null);
// Starts a fetch: clears any previous result and raises the loading flag.
const getData = async (...params) => {
loading.value = true;
data.value = null;
// NOTE(review): the function body appears truncated here — the code that
// awaits `fetcher(...params)`, fills data/error, clears `loading`, and the
// composable's return statement are not visible in this chunk; verify
// against the original source.
// Example usage:
// userFetcher({ query: this.query });
// Issues a GET to /api/user with `params` as the query string and resolves
// with the full axios response object (not just its data payload).
async function userFetcher(params) {
  const response = await axios.get("/api/user", { params });
  return response;
}
<template>
  <div>
    <label>User Query:</label>
    <!-- <input> is a void element: it must not have a closing </input> tag -->
    <input v-model="query" :disabled="loading" />
    <!-- NOTE(review): :busy is not a native <button> attribute — confirm a
         custom component/directive consumes it, or it may be meant as :disabled -->
    <button @click="getData" :busy="loading" />
    <!-- Render the result component once the fetch has settled either way -->
    <User v-if="data != null || error != null" :data="data" :error="error" />
  </div>
</template>
<script>
// Load Environment variables from dotenv
require('dotenv').config();
const express = require("express");
const bodyParser = require("body-parser");
// Import Botkit's core features
const { Botkit } = require("botkit");
// Import a platform-specific adapter for slack.
const { SlackAdapter, SlackEventMiddleware } = require('botbuilder-adapter-slack');
## Accompanying code for - Processing large S3 files with AWS Lambda
## https://medium.com/swlh/processing-large-s3-files-with-aws-lambda-2c5840ae5c91
import csv
import json
import os
import boto3
import botocore.response
# NOTE(review): this fragment belongs inside handler() — the chunk's line
# order appears scrambled and indentation has been stripped; verify
# structure against the original article source before editing.
# Advance the byte offset by however much of the stream this run consumed.
new_offset = offset + bodylines.offset
# If the object is not fully read, re-invoke this Lambda to continue where
# this run left off, carrying the offset and the already-parsed header.
if new_offset < s3_object.content_length:
new_event = {
**event,
"offset": new_offset,
"fieldnames": fieldnames
}
# invoke_lambda is not visible in this chunk — presumably an async
# self-invocation helper; confirm it uses InvocationType='Event'.
invoke_lambda(context.function_name, new_event)
# Bail out of the row loop while enough execution time remains to schedule
# a continuation invocation.
# NOTE(review): constant name is misspelled ("MINIMUN" for MINIMUM); left
# unchanged here since its use site must be renamed in lockstep.
MINIMUN_REMAINING_TIME_MS = 10000
# Stream the remaining CSV rows; `fieldnames` carries the header across
# chunked invocations (only the first run parses it from the file itself).
csv_reader = csv.DictReader(bodylines.iter_lines(), fieldnames=fieldnames)
for row in csv_reader:
## process records here
## or alternatively forward records to kinesis or SQS to be processed separately
if context.get_remaining_time_in_millis() < MINIMUN_REMAINING_TIME_MS:
fieldnames = fieldnames or csv_reader.fieldnames
break
def get_object_bodylines(s3_object, offset):
    """Open a ranged GET on `s3_object` starting at byte `offset` and wrap
    the streaming response body in a BodyLines helper."""
    # The Range header skips bytes already consumed by a prior invocation.
    resp = s3_object.get(Range=f'bytes={offset}-')
    stream: botocore.response.StreamingBody = resp['Body']
    return BodyLines(stream)
class BodyLines:
    """Tracks read progress through an S3 StreamingBody.

    NOTE(review): callers elsewhere in this file read `.offset` and call
    `.iter_lines()`; iter_lines is not visible in this chunk — confirm
    against the original source.
    """

    def __init__(self, body: botocore.response.StreamingBody, initial_offset=0):
        # Underlying streaming response body from the ranged GET.
        self.body = body
        # Bytes consumed so far, relative to the start of the requested range.
        self.offset = initial_offset
# Lambda entry point: processes a large S3 CSV in time-bounded chunks,
# re-invoking itself with an updated offset when time runs low.
# NOTE(review): the body is truncated in this chunk — the scrambled
# fragments above (ranged read, DictReader loop, self re-invocation)
# appear to belong inside this function; verify against the original gist.
def handler(event, context):
bucket_name = event['bucket']
object_key = event['object_key']
# offset/fieldnames are carried across self-invocations; defaults apply
# only on the first run.
offset = event.get('offset', 0)
fieldnames = event.get('fieldnames', None)