Skip to content

Commit

Permalink
Merge pull request #77 from justinmerrell/starter-examples
Browse files Browse the repository at this point in the history
feat: prompt cleanup and new starter examples
  • Loading branch information
DireLines authored Jan 30, 2024
2 parents 848d4ef + 896a022 commit 3dd8d71
Show file tree
Hide file tree
Showing 13 changed files with 163 additions and 65 deletions.
4 changes: 2 additions & 2 deletions cmd/project/functions.go
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ import (

// TODO: embed all hidden files even those not at top level
//
//go:embed starter_templates/* starter_templates/*/.*
//go:embed starter_examples/* starter_examples/*/.*
var starterTemplates embed.FS

//go:embed example.toml
Expand All @@ -27,7 +27,7 @@ var tomlTemplate embed.FS
//go:embed exampleDockerfile
var dockerfileTemplate embed.FS

const basePath string = "starter_templates"
const basePath string = "starter_examples"

// baseDockerImage returns the tag of the RunPod base Docker image for the
// given CUDA version, e.g. "runpod/base:0.4.4-cuda11.8.0".
func baseDockerImage(cudaVersion string) string {
return fmt.Sprintf("runpod/base:0.4.4-cuda%s", cudaVersion)
Expand Down
68 changes: 46 additions & 22 deletions cmd/project/project.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ package project
import (
"cli/api"
"fmt"
"os"
"strings"

"github.com/manifoldco/promptui"
Expand All @@ -28,6 +29,7 @@ func prompt(message string) string {
}
return s
}

func contains(input string, choices []string) bool {
for _, choice := range choices {
if input == choice {
Expand All @@ -36,6 +38,7 @@ func contains(input string, choices []string) bool {
}
return false
}

func promptChoice(message string, choices []string, defaultChoice string) string {
var s string = ""
for !contains(s, choices) {
Expand Down Expand Up @@ -84,12 +87,13 @@ func selectNetworkVolume() (networkVolumeId string, err error) {
networkVolumeId = options[i].Value
return networkVolumeId, nil
}

func selectStarterTemplate() (template string, err error) {
type StarterTemplateOption struct {
Name string // The string to display
Value string // The actual value to use
}
templates, err := starterTemplates.ReadDir("starter_templates")
templates, err := starterTemplates.ReadDir("starter_examples")
if err != nil {
fmt.Println("Something went wrong trying to fetch starter templates")
fmt.Println(err)
Expand Down Expand Up @@ -129,39 +133,59 @@ var NewProjectCmd = &cobra.Command{
Use: "new",
Args: cobra.ExactArgs(0),
Short: "create a new project",
Long: "create a new Runpod project folder",
Long: "create a new RunPod project folder",
// Run drives the interactive "project new" flow: gather project settings,
// show a summary, confirm with the user, then scaffold the project.
//
// NOTE: the rendered diff interleaved the pre- and post-commit versions of
// this closure (duplicate declarations of err, cudaVersion, pythonVersion);
// this body keeps only the post-commit (added) lines so it compiles.
Run: func(cmd *cobra.Command, args []string) {
	fmt.Println("Creating a new project...")

	// Project Name: prompt only when not supplied via flag, then echo it.
	if projectName == "" {
		projectName = prompt("Enter the project name")
	}
	fmt.Println("Project name: " + projectName)

	// Starter Example: any error from the selector is deliberately swallowed
	// and modelType falls back to "" (no starter example).
	if modelType == "" {
		starterExample, err := selectStarterTemplate()
		modelType = starterExample
		if err != nil {
			modelType = ""
		}
	}

	// CUDA Version
	cudaVersion := promptChoice("Select CUDA Version [default: 11.8.0]: ",
		[]string{"11.1.1", "11.8.0", "12.1.0"}, "11.8.0")

	// Python Version
	pythonVersion := promptChoice("Select Python Version [default: 3.10]: ",
		[]string{"3.8", "3.9", "3.10", "3.11"}, "3.10")

	// Project Summary
	fmt.Println("\nProject Summary:")
	fmt.Println("------------------------------------------------")
	fmt.Printf("Project Name : %s\n", projectName)
	fmt.Printf("Starter Example : %s\n", modelType)
	fmt.Printf("CUDA Version : %s\n", cudaVersion)
	fmt.Printf("Python Version : %s\n", pythonVersion)
	fmt.Println("------------------------------------------------")

	// Confirm: show the destination directory and require an explicit "yes".
	currentDir, err := os.Getwd()
	if err != nil {
		fmt.Println("Error getting current directory:", err)
		return
	}

	fmt.Printf("\nThe project will be created in the current directory: %s\n", currentDir)
	confirm := promptChoice("Proceed with creation? [yes/no, default: yes]: ", []string{"yes", "no"}, "yes")
	if confirm != "yes" {
		fmt.Println("Project creation cancelled.")
		return
	}

	// Create Project
	createNewProject(projectName, cudaVersion, pythonVersion, modelType, modelName, initCurrentDir)
	fmt.Printf("\nProject %s created successfully!\n", projectName)
	fmt.Println("From your project root run `runpodctl project dev` to start a development pod.")
},
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,3 @@
# To learn more, see https://pip.pypa.io/en/stable/reference/requirements-file-format/

<<RUNPOD>>
hf_transfer
14 changes: 14 additions & 0 deletions cmd/project/starter_examples/LLM/builder/requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# Required Python packages get listed here, one per line.
# Recommended to lock the version number to avoid unexpected changes.

# You can also install packages from a git repository, e.g.:
# git+https://github.com/runpod/runpod-python.git
# To learn more, see https://pip.pypa.io/en/stable/reference/requirements-file-format/

<<RUNPOD>>
hf_transfer

torch
accelerate
transformers
sentencepiece
36 changes: 36 additions & 0 deletions cmd/project/starter_examples/LLM/src/handler.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
''' A starter example for a handler file using RunPod and a large language model for text generation. '''

import io
import base64
from typing import Dict

import runpod
from transformers import T5Tokenizer, T5ForConditionalGeneration

# Initialize the tokenizer and model once at import time so the weights are
# loaded before the serverless worker starts accepting jobs.
tokenizer = T5Tokenizer.from_pretrained("google/flan-t5-base")
# NOTE(review): device_map="auto" combined with an explicit .to("cuda") is
# redundant and can conflict with accelerate-managed placement — confirm intended.
model = T5ForConditionalGeneration.from_pretrained("google/flan-t5-base", device_map="auto").to("cuda")


def handler(job: Dict[str, any]) -> str:
    """
    Handler function for processing a job.

    Args:
        job (dict): A dictionary containing the job input.

    Returns:
        str: The generated text response.
    """
    prompt_text = job['input']['text']

    # Tokenize the prompt and move it to the GPU alongside the model.
    token_ids = tokenizer(prompt_text, return_tensors="pt").input_ids.to("cuda")
    generated_ids = model.generate(token_ids)

    return tokenizer.decode(generated_ids[0])


# Register the handler and start the RunPod serverless worker loop.
runpod.serverless.start({"handler": handler})
10 changes: 10 additions & 0 deletions cmd/project/starter_examples/Stable Diffusion/.runpodignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
# Similar to .gitignore
# Matches will not be synced to the development pod or cause the development pod to reload.

Dockerfile
__pycache__/
*.pyc
.*.swp
.git/
*.tmp
*.log
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Required Python packages get listed here, one per line.
# Recommended to lock the version number to avoid unexpected changes.

# You can also install packages from a git repository, e.g.:
# git+https://github.com/runpod/runpod-python.git
# To learn more, see https://pip.pypa.io/en/stable/reference/requirements-file-format/

<<RUNPOD>>
hf_transfer

accelerate
diffusers
transformers
42 changes: 42 additions & 0 deletions cmd/project/starter_examples/Stable Diffusion/src/handler.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
''' A starter example for a handler file using RunPod and diffusers for image generation. '''

import io
import base64
from typing import Dict

import runpod
from diffusers import AutoPipelineForText2Image
import torch

# Initialize the pipeline once at import time so the model weights are loaded
# before the serverless worker starts accepting jobs. Requires a CUDA GPU.
pipe = AutoPipelineForText2Image.from_pretrained(
"stabilityai/sdxl-turbo", # model name
torch_dtype=torch.float16, variant="fp16"
).to("cuda")


def handler(job: Dict[str, any]) -> str:
    """
    Handler function for processing a job.

    Args:
        job (dict): A dictionary containing the job input.

    Returns:
        str: A base64 encoded string of the generated image.
    """
    prompt = job['input']['prompt']

    # SDXL-Turbo is tuned for single-step generation without guidance.
    image = pipe(prompt=prompt, num_inference_steps=1, guidance_scale=0.0).images[0]

    buffer = io.BytesIO()
    try:
        image.save(buffer, format="PNG")
        base64_image = base64.b64encode(buffer.getvalue()).decode('utf-8')
    finally:
        buffer.close()

    return f"data:image/png;base64,{base64_image}"


# Register the handler and start the RunPod serverless worker loop.
runpod.serverless.start({"handler": handler})
4 changes: 0 additions & 4 deletions cmd/project/starter_templates/llama2/builder/requirements.txt

This file was deleted.

36 changes: 0 additions & 36 deletions cmd/project/starter_templates/llama2/src/handler.py

This file was deleted.

0 comments on commit 3dd8d71

Please sign in to comment.