31 changes: 31 additions & 0 deletions manifests/g/ggml/llamacpp/b7285/ggml.llamacpp.installer.yaml
@@ -0,0 +1,31 @@
# Created with komac v2.11.2
# yaml-language-server: $schema=https://aka.ms/winget-manifest.installer.1.9.0.schema.json

PackageIdentifier: ggml.llamacpp
PackageVersion: b7285
InstallerType: zip
NestedInstallerType: portable
NestedInstallerFiles:
- RelativeFilePath: llama-batched-bench.exe
- RelativeFilePath: llama-bench.exe
- RelativeFilePath: llama-cli.exe
- RelativeFilePath: llama-gguf-split.exe
- RelativeFilePath: llama-imatrix.exe
- RelativeFilePath: llama-mtmd-cli.exe
- RelativeFilePath: llama-perplexity.exe
- RelativeFilePath: llama-quantize.exe
- RelativeFilePath: llama-run.exe
- RelativeFilePath: llama-server.exe
- RelativeFilePath: llama-tokenize.exe
- RelativeFilePath: llama-tts.exe
Dependencies:
  PackageDependencies:
  - PackageIdentifier: Microsoft.VCRedist.2015+.x64
ReleaseDate: 2025-12-05
ArchiveBinariesDependOnPath: true
Installers:
- Architecture: x64
  InstallerUrl: https://github.com/ggml-org/llama.cpp/releases/download/b7285/llama-b7285-bin-win-vulkan-x64.zip
  InstallerSha256: 13D647012DFD10CC17359301BDB900AF3076FA70F3236DF0757A2985C11B4928
ManifestType: installer
ManifestVersion: 1.9.0
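
Note on how this installer manifest behaves (an illustrative aside, not part of the manifest): with InstallerType: zip and NestedInstallerType: portable, winget downloads the Vulkan archive, verifies the SHA256, extracts it, and exposes each listed RelativeFilePath as a portable command, while ArchiveBinariesDependOnPath: true signals that the extracted binaries should be resolved via PATH so they stay next to their bundled DLLs. Assuming the manifest is published and the winget CLI is available, installation and a quick smoke test would look roughly like:

    winget install --id ggml.llamacpp --version b7285
    llama-cli --version    # a new terminal may be needed before the PATH/alias updates take effect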
42 changes: 42 additions & 0 deletions manifests/g/ggml/llamacpp/b7285/ggml.llamacpp.locale.en-US.yaml
@@ -0,0 +1,42 @@
# Created with komac v2.11.2
# yaml-language-server: $schema=https://aka.ms/winget-manifest.defaultLocale.1.9.0.schema.json

PackageIdentifier: ggml.llamacpp
PackageVersion: b7285
PackageLocale: en-US
Publisher: ggml
PublisherUrl: https://github.com/ggml-org
PublisherSupportUrl: https://github.com/ggml-org/llama.cpp/issues
PackageName: llama.cpp
PackageUrl: https://github.com/ggml-org/llama.cpp
License: MIT
LicenseUrl: https://github.com/ggml-org/llama.cpp/blob/HEAD/LICENSE
ShortDescription: LLM inference in C/C++
Tags:
- ggml
- llama
ReleaseNotes: |-
  Warning
  Release Format Update: Linux releases will soon use .tar.gz archives instead of .zip. Please make the necessary changes to your deployment scripts.
  HIP : fix RDNA4 build (#17792)
  macOS/iOS:
  - macOS Apple Silicon (arm64)
  - macOS Intel (x64)
  - iOS XCFramework
  Linux:
  - Ubuntu x64 (CPU)
  - Ubuntu x64 (Vulkan)
  - Ubuntu s390x (CPU)
  Windows:
  - Windows x64 (CPU)
  - Windows arm64 (CPU)
  - Windows x64 (CUDA)
  - Windows x64 (Vulkan)
  - Windows x64 (SYCL)
  - Windows x64 (HIP)
ReleaseNotesUrl: https://github.com/ggml-org/llama.cpp/releases/tag/b7285
Documentations:
- DocumentLabel: Wiki
  DocumentUrl: https://github.com/ggml-org/llama.cpp/wiki
ManifestType: defaultLocale
ManifestVersion: 1.9.0
8 changes: 8 additions & 0 deletions manifests/g/ggml/llamacpp/b7285/ggml.llamacpp.yaml
@@ -0,0 +1,8 @@
# Created with komac v2.11.2
# yaml-language-server: $schema=https://aka.ms/winget-manifest.version.1.9.0.schema.json

PackageIdentifier: ggml.llamacpp
PackageVersion: b7285
DefaultLocale: en-US
ManifestType: version
ManifestVersion: 1.9.0
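
Before a manifest set like this is submitted, a typical local check (a sketch, assuming the winget CLI is installed and the three files above sit together under manifests/g/ggml/llamacpp/b7285) is:

    winget validate --manifest manifests/g/ggml/llamacpp/b7285
    winget settings --enable LocalManifestFiles    # requires an elevated shell, one-time setup
    winget install --manifest manifests/g/ggml/llamacpp/b7285

winget validate runs the same schema checks the winget-pkgs pipeline applies, and the local install confirms the zip extracts and the portable commands resolve before the PR is opened.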