Text Generation
Transformers
Safetensors
PyTorch
mistral
smarter
code
chemistry
biology
finance
legal
art
Mixture of Experts
Merge
text-generation-inference
music
climate
medical
673_trillion_parameters
Instructions to use ZeppelinCorp/Charm_15 with libraries, inference providers, notebooks, and local apps. Follow these links to get started.
- Libraries
- Transformers
How to use ZeppelinCorp/Charm_15 with Transformers:
# Use a pipeline as a high-level helper
from transformers import pipeline
pipe = pipeline("text-generation", model="ZeppelinCorp/Charm_15")

# Load model directly
from transformers import AutoTokenizer, AutoModelForCausalLM
tokenizer = AutoTokenizer.from_pretrained("ZeppelinCorp/Charm_15")
model = AutoModelForCausalLM.from_pretrained("ZeppelinCorp/Charm_15")
- Notebooks
- Google Colab
- Kaggle
- Local Apps
- vLLM
How to use ZeppelinCorp/Charm_15 with vLLM:
Install from pip and serve model
# Install vLLM from pip:
pip install vllm

# Start the vLLM server:
vllm serve "ZeppelinCorp/Charm_15"

# Call the server using curl (OpenAI-compatible API):
curl -X POST "http://localhost:8000/v1/completions" \
  -H "Content-Type: application/json" \
  --data '{
    "model": "ZeppelinCorp/Charm_15",
    "prompt": "Once upon a time,",
    "max_tokens": 512,
    "temperature": 0.5
  }'
Use Docker
docker model run hf.co/ZeppelinCorp/Charm_15
- SGLang
How to use ZeppelinCorp/Charm_15 with SGLang:
Install from pip and serve model
# Install SGLang from pip:
pip install sglang

# Start the SGLang server:
python3 -m sglang.launch_server \
  --model-path "ZeppelinCorp/Charm_15" \
  --host 0.0.0.0 \
  --port 30000

# Call the server using curl (OpenAI-compatible API):
curl -X POST "http://localhost:30000/v1/completions" \
  -H "Content-Type: application/json" \
  --data '{
    "model": "ZeppelinCorp/Charm_15",
    "prompt": "Once upon a time,",
    "max_tokens": 512,
    "temperature": 0.5
  }'
Use Docker images
docker run --gpus all \
  --shm-size 32g \
  -p 30000:30000 \
  -v ~/.cache/huggingface:/root/.cache/huggingface \
  --env "HF_TOKEN=<secret>" \
  --ipc=host \
  lmsysorg/sglang:latest \
  python3 -m sglang.launch_server \
  --model-path "ZeppelinCorp/Charm_15" \
  --host 0.0.0.0 \
  --port 30000

# Call the server using curl (OpenAI-compatible API):
curl -X POST "http://localhost:30000/v1/completions" \
  -H "Content-Type: application/json" \
  --data '{
    "model": "ZeppelinCorp/Charm_15",
    "prompt": "Once upon a time,",
    "max_tokens": 512,
    "temperature": 0.5
  }'
- Docker Model Runner
How to use ZeppelinCorp/Charm_15 with Docker Model Runner:
docker model run hf.co/ZeppelinCorp/Charm_15
| from typing import Dict, List, Optional | |
| import random | |
class CodeGenerator:
    """Generate function source code in multiple programming languages.

    Each supported language has a ``str.format`` template plus default
    parameter lists, return types, and a default body. Any component may
    be overridden per call via :meth:`generate_code`.
    """

    # Format templates per language. Doubled braces {{ }} render as
    # literal braces. The Python template includes {ret_type} so the
    # return-type hint from RETURN_TYPES is actually emitted (it was
    # silently dropped before because the placeholder was missing).
    TEMPLATES: Dict[str, str] = {
        "python": "def {func_name}({params}){ret_type}:\n    {body}",
        "javascript": "function {func_name}({params}) {{\n    {body}\n}}",
        "cpp": "{ret_type} {func_name}({params}) {{\n    {body}\n}}",
        "java": (
            "public class Main {{\n"
            "    public static {ret_type} {func_name}({params}) {{\n"
            "        {body}\n"
            "    }}\n"
            "}}"
        ),
        "csharp": (
            "public class Program {{\n"
            "    public static {ret_type} {func_name}({params}) {{\n"
            "        {body}\n"
            "    }}\n"
            "}}"
        ),
    }

    # Pool of names used when the caller does not supply func_name.
    FUNCTION_NAMES: List[str] = ["calculate", "processData", "computeValue", "generateOutput"]

    # Default parameter lists, matched to each language's syntax.
    DEFAULT_PARAMS: Dict[str, str] = {
        "python": "x: int, y: int",
        "javascript": "x, y",
        "cpp": "int x, int y",
        "java": "int x, int y",
        "csharp": "int x, int y",
    }

    # Default return types. Python's entry is an annotation suffix that
    # follows the parameter list; JavaScript has no explicit return type.
    RETURN_TYPES: Dict[str, str] = {
        "cpp": "int",
        "java": "int",
        "csharp": "int",
        "python": " -> int",  # type hint appended after the parameter list
        "javascript": "",  # no explicit return type
    }

    # Default body, normalized per language by _normalize_body.
    DEFAULT_BODY: str = "return x + y;"

    def __init__(self, seed: Optional[int] = None):
        """Initialize the generator.

        Args:
            seed: Optional seed for reproducible function-name choices.
                A private ``random.Random`` instance is used so the global
                ``random`` module state is left untouched.
        """
        self._rng = random.Random(seed)

    def get_supported_languages(self) -> List[str]:
        """Return the list of supported language identifiers."""
        return list(self.TEMPLATES.keys())

    def generate_code(
        self,
        language: str,
        func_name: Optional[str] = None,
        params: Optional[str] = None,
        body: Optional[str] = None,
        ret_type: Optional[str] = None,
    ) -> Optional[str]:
        """Generate code for *language* with customizable components.

        Args:
            language: Target programming language key (see TEMPLATES).
            func_name: Custom function name; a random name from
                FUNCTION_NAMES is chosen when falsy.
            params: Custom parameter list; per-language default when falsy.
            body: Custom function body; DEFAULT_BODY when falsy.
            ret_type: Custom return type; per-language default when falsy.

        Returns:
            The generated code string, or None if *language* is unsupported.

        Raises:
            ValueError: If the template references a missing component.
            RuntimeError: If formatting fails for any other reason.
        """
        if language not in self.TEMPLATES:
            return None

        # Fall back to per-language defaults for anything not supplied.
        selected_func_name = func_name or self._rng.choice(self.FUNCTION_NAMES)
        selected_params = params or self.DEFAULT_PARAMS.get(language, "")
        selected_body = self._normalize_body(body or self.DEFAULT_BODY, language)
        selected_ret_type = ret_type or self.RETURN_TYPES.get(language, "")

        try:
            return self.TEMPLATES[language].format(
                func_name=selected_func_name,
                params=selected_params,
                body=selected_body,
                ret_type=selected_ret_type,
            )
        except KeyError as e:
            # Chain the cause so the offending placeholder stays visible.
            raise ValueError(f"Missing template component: {str(e)}") from e
        except Exception as e:
            raise RuntimeError(f"Code generation failed: {str(e)}") from e

    def _normalize_body(self, body: str, language: str) -> str:
        """Normalize a function body to the target language's conventions."""
        body = body.strip()
        # C-family languages require a terminating semicolon.
        if language in {"cpp", "java", "csharp"} and not body.endswith(";"):
            return body + ";"
        if language == "python":
            return body.replace(";", "")  # Python statements take no semicolons
        return body
def main():
    """Demonstrate code generation for every supported language."""
    gen = CodeGenerator(seed=42)  # fixed seed keeps the demo reproducible

    languages = gen.get_supported_languages()
    print("Supported languages:", languages)
    print("\nGenerated code examples:")

    for language in languages:
        snippet = gen.generate_code(language)
        print(f"\n{language.upper()}:\n{snippet}")


if __name__ == "__main__":
    main()