Every time you choose to apply a rule (or rules), explicitly state the rule(s) in the output. You can abbreviate the rule description to a single word or phrase.
[Brief description]
- [more description]
- [more description]
- [more description]
In the Generative AI Age, your ability to generate prompts is your ability to generate results.
Claude 3.5 Sonnet and o1 series models are recommended for meta prompting.
Replace `{{user-input}}` with your own input to generate prompts.
Use the `mp_*.txt` files as example `user-input`s to see how to generate high-quality prompts.
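As an illustration of that workflow, here is a minimal Python sketch of the substitution step, assuming a meta-prompt template file that contains the `{{user-input}}` placeholder; the file names `meta-prompt.txt` and `mp_example.txt` are placeholders for this example, not files shipped with the repo.

```python
# Minimal sketch: fill the {{user-input}} placeholder in a meta-prompt template.
# The file names below are assumptions for illustration only.
from pathlib import Path

def build_prompt(template_path: str, user_input: str) -> str:
    """Read a meta-prompt template and substitute the {{user-input}} placeholder."""
    template = Path(template_path).read_text(encoding="utf-8")
    return template.replace("{{user-input}}", user_input)

if __name__ == "__main__":
    user_input = Path("mp_example.txt").read_text(encoding="utf-8")  # hypothetical mp_*.txt file
    print(build_prompt("meta-prompt.txt", user_input))               # hypothetical template file
```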
Rank,Overall Acc,Model,Model Link,Organization,License,AST Summary,Exec Summary,Simple Function AST,Python Simple Function AST,Java Simple Function AST,JavaScript Simple Function AST,Multiple Functions AST,Parallel Functions AST,Parallel Multiple AST,Simple Function Exec,Python Simple Function Exec,REST Simple Function Exec,Multiple Functions Exec,Parallel Functions Exec,Parallel Multiple Exec,Relevance Detection,Cost ($ Per 1k Function Calls),Latency Mean (s),Latency Standard Deviation (s),Latency 95th Percentile (s)
1,78.76%,GPT-4-turbo-2024-04-09 (FC),https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo,OpenAI,Proprietary,81.70%,65.13%,73.82%,90.00%,33.00%,26.00%,89.50%,89.00%,74.50%,73.53%,83.00%,60.00%,70.00%,72.00%,45.00%,88.75%,4.79,5.68,6.67,20.07
2,73.71%,Claude-3-Opus-20240229 (FC tools-2024-04-04),https://www.anthropic.com/news/claude-3-family,Anthropic,Proprietary,70.35%,55.20%,80.91%,87.00%,61.00%,72.00%,91.00%,58.00%,51.50%,85.29%,85.00%,85.71%,74.00%,24.00%,37.50%,82.50%,30.65,12.63,3.
from datetime import datetime
import urllib.request
import base64
import json
import time
import os

webui_server_url = 'http://127.0.0.1:7860'
out_dir = 'api_out'
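Below is a minimal sketch of how this setup is typically used: POST a JSON payload to a webui endpoint with `urllib.request` and decode the base64-encoded images in the response. The endpoint path, payload handling, and output file naming are assumptions, not part of the original snippet.

```python
# Sketch only: call the webui API and save the returned base64 images to out_dir.
os.makedirs(out_dir, exist_ok=True)

def call_api(api_endpoint: str, **payload):
    # POST the payload as JSON to the webui server and return the parsed JSON response.
    data = json.dumps(payload).encode('utf-8')
    request = urllib.request.Request(
        f'{webui_server_url}/{api_endpoint}',
        headers={'Content-Type': 'application/json'},
        data=data,
    )
    with urllib.request.urlopen(request) as response:
        return json.loads(response.read().decode('utf-8'))

def call_txt2img_api(**payload):
    # Assumed endpoint name; decode each returned image and save it with a timestamped filename.
    response = call_api('sdapi/v1/txt2img', **payload)
    for index, image in enumerate(response.get('images', [])):
        timestamp = datetime.now().strftime('%Y%m%d-%H%M%S')
        save_path = os.path.join(out_dir, f'txt2img-{timestamp}-{index}.png')
        with open(save_path, 'wb') as f:
            f.write(base64.b64decode(image))
```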
# Credit 🙏: I just used the example from the LangChain docs and it works quite well: https://python.langchain.com/en/latest/use_cases/question_answering.html
# Note 2: The Arxiv -> PDF logic is a bit messy; I'm sure it can be done better.
# Note 3: Please install the following dependencies:
#   pip install arxiv PyPDF2 langchain chromadb
# To run: save this as `app.py`.
# The chat feature shipped in the H2O nightly build this week, so it needs to be installed from the nightly link:
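For context, here is a minimal sketch of the Arxiv -> PDF -> vector-store flow those notes describe; it assumes OpenAI embeddings and the old-style LangChain imports of that era, and the chunking parameters are arbitrary choices for illustration.

```python
# Sketch only: download an arXiv paper, extract its text, and index it in Chroma.
import arxiv
from PyPDF2 import PdfReader
from langchain.text_splitter import CharacterTextSplitter
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores import Chroma

def build_index_from_arxiv(arxiv_id: str) -> Chroma:
    # Fetch the paper metadata and download its PDF to the current directory.
    paper = next(arxiv.Search(id_list=[arxiv_id]).results())
    pdf_path = paper.download_pdf()
    # Extract the raw text page by page, then split it into chunks for embedding.
    text = "".join(page.extract_text() or "" for page in PdfReader(pdf_path).pages)
    chunks = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0).split_text(text)
    return Chroma.from_texts(chunks, OpenAIEmbeddings())
```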
function ipToInt(ip: string) {
  // Convert a dotted-quad IPv4 address into its 32-bit integer representation.
  return ip.split(".").reduce((acc, octet) => (acc << 8) + parseInt(octet), 0);
}

function isIpInCIDR(ip: string, cidr: string) {
  const [cidrIp, prefixLength] = cidr.split("/");
  const mask = -1 << (32 - parseInt(prefixLength));
  const ipInt = ipToInt(ip);
  const cidrIpInt = ipToInt(cidrIp);
  // The address is inside the block if both share the same masked network prefix.
  return (ipInt & mask) === (cidrIpInt & mask);
}
from langchain.llms import Anthropic
from langchain.agents import load_tools, initialize_agent
from langchain.tools import AIPluginTool

PREFIX = """\n\nHuman: Answer the following questions as best you can. You have access to the following tools:"""
SUFFIX = """Begin!
Question: {input}
\n\nAssistant:
Thought:{agent_scratchpad}"""
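A minimal sketch of how these pieces are usually wired together with the legacy `initialize_agent` API follows; the model name, tool list, plugin URL, and query are assumptions for illustration, not taken from the original code.

```python
# Sketch only: assumes ANTHROPIC_API_KEY is set and the legacy (pre-LCEL) agent API.
llm = Anthropic(model="claude-v1", temperature=0)  # model name is an assumption

# AIPluginTool wraps an OpenAPI plugin manifest; this URL is the LangChain docs example.
plugin = AIPluginTool.from_plugin_url("https://www.klarna.com/.well-known/ai-plugin.json")
tools = load_tools(["requests_all"]) + [plugin]

agent = initialize_agent(
    tools,
    llm,
    agent="zero-shot-react-description",
    agent_kwargs={"prefix": PREFIX, "suffix": SUFFIX},
    verbose=True,
)
agent.run("What t-shirts are available in the Klarna shopping plugin?")
```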
# Credit to https://github.com/nsarrazin/serge - this is heavily copied from the API there and not cleaned up very well yet, but it might work.
from typing import List, Optional
from uuid import UUID, uuid4
from pydantic import BaseModel, Field
from datetime import datetime
import subprocess
import os
import asyncio
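The imports above point at pydantic request/response models in the style of the serge API; the sketch below shows hypothetical models along those lines, and the field names and defaults are assumptions rather than the original definitions.

```python
# Sketch only: hypothetical chat models in the style of the serge API referenced above.
class Question(BaseModel):
    question: str
    answer: Optional[str] = None

class Chat(BaseModel):
    id: UUID = Field(default_factory=uuid4)
    created: datetime = Field(default_factory=datetime.now)
    questions: List[Question] = []
    model: str = "7B"
```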
pub resource SaleCollection: SalePublic {
    // NFTs currently listed for sale, keyed by token ID.
    pub var forSale: @{UInt64: PinataPartyContract.NFT}
    // Asking price for each listed token ID.
    pub var prices: {UInt64: UFix64}
    // Capability to the owner's PinnieToken vault, used to receive sale proceeds.
    access(account) let ownerVault: Capability<&AnyResource{PinnieToken.Receiver}>

    init (vault: Capability<&AnyResource{PinnieToken.Receiver}>) {
        self.forSale <- {}
        self.prices = {}
        self.ownerVault = vault
    }