commit 3cd93f27b5bf732c6ea0b9481f61c5cbe35eed30
Author: aethrvmn
Date:   Wed Oct 23 00:26:05 2024 +0200

    Move project to nyrid melite

diff --git a/.gitignore b/.gitignore
new file mode 100755
index 0000000..32e1dc0
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,3 @@
+nimcache/
+nimblecache/
+htmldocs/
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..913d895
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,61 @@
+Don't Be Evil License (DBEL) 1.0
+
+1. Acceptance
+By using, copying, modifying, or distributing the source code, training data, training environment, or its associated machine learning model weights (collectively the "Software"), you agree to comply with all terms outlined in this license.
+
+2. Copyright License
+The Licensor (defined below) grants you a non-exclusive, worldwide, royalty-free, non-sublicensable, non-transferable license to use, copy, modify, and distribute the Software, including associated model weights, training data, and training environments, subject to the conditions set forth in this license. This includes the right to create and distribute derivative works of the Software, provided that the limitations below are observed.
+
+3. Non-Commercial Use Only
+You may use, copy, modify, and distribute the Software and derivative works solely for non-commercial purposes. Non-commercial purposes include, but are not limited to:
+- Personal research and study.
+- Educational and academic projects.
+- Public knowledge and hobby projects.
+- Religious observance.
+- Non-commercial research, or AI and machine learning (ML) experimentation.
+
+4. Distribution and Monetization Provisions
+Any use of the Software or derivative works for profit, or in a business context, including in monetized services and products, requires explicit, separate permission from the Licensor. The restrictions on commercial use apply to both the source code and any model weights produced by the Software.
+
+Any distribution must include this license, and the non-commercial restriction must be maintained. Weights resulting from use of the Software, including but not limited to training or fine-tuning models, must be shared under this same license, ensuring all restrictions and conditions are preserved.
+
+5. Integrity of the Licensor's Software
+You may not alter, remove, or obscure any functionalities related to payment, donation, or attribution in any distributed version of the Licensed Materials. You must retain all notices of copyright, licensing, and attribution provided by the Licensor in any derivative works.
+
+You may not alter or remove copyright, license, or trademark notices in the Software, and any public mention of the Software must include attribution to the Licensor.
+
+6. Patents
+This license grants you a patent license under any patents held by the Licensor that are directly related to the Software. If you or your company make any claim that the Software infringes on a patent, your rights under this license terminate immediately.
+
+7. Distribution of Modifications
+If you modify the Software, you must:
+- Provide prominent and clear notice of any modifications.
+- Retain all original notices of copyright, licensing, and attribution to the Licensor.
+- Distribute modified versions under this license.
+
+8. Fair Use
+Nothing under this license restricts your rights under applicable laws regarding fair use of copyrighted material.
+
+9. No Other Rights
+These terms do not allow you to sublicense, assign, or transfer any of your rights to third parties, except as expressly allowed by these terms.
+
+These terms do not prevent the Licensor from granting licenses to anyone else.
+
+These terms do not imply any other licenses.
+
+No other rights beyond those explicitly stated are granted.
+
+10. Termination
+Your rights under this license will automatically terminate if you breach any of its terms. The Licensor may provide you with a 30-day period to rectify any breach. If you fail to do so, or if you breach the terms again after rectification, your license will terminate permanently.
+
+11. Disclaimer of Warranty
+The Licensed Materials are provided "as-is", without any warranties, express or implied, including but not limited to warranties of fitness for a particular purpose. The Licensor is not liable for any claims or damages arising from your use of the Licensed Materials.
+
+12. Definitions
+- "Licensor": The entity or individual offering the Licensed Materials under this license.
+- "Licensed Materials": The software, source code, training data, training environment, model weights, and any associated AI/ML components provided under this license.
+- "You": The individual or entity accepting the terms of this license, including any organization or entity that this individual or entity might work for or represent, including any entities under common control.
+- "Your license": The license granted to you for the Software under these terms.
+- "Model weights": The machine learning model parameters generated by training or fine-tuning models using the Licensed Materials.
+- "Use": Anything you do with the Software requiring your license.
+- "Trademark": Trademarks, service marks, and similar rights.
diff --git a/README.md b/README.md
new file mode 100755
index 0000000..70b7b60
--- /dev/null
+++ b/README.md
@@ -0,0 +1,3 @@
+# melite
+
+An exploration of NLP bigram and n-gram models in Nim, to learn both the language and NLP.
diff --git a/batcher.nim b/batcher.nim
new file mode 100755
index 0000000..9707b22
--- /dev/null
+++ b/batcher.nim
@@ -0,0 +1,34 @@
+import hparams
+
+import arraymancer
+### CPU Part Starts Here
+# var trainingBlock: seq[int] = trainingSet[0..blockSize]
+# var trainingBlockNext: seq[int] = trainingSet[1..blockSize+1]
+
+# for i in 0..blockSize-1:
+#   var context = trainingBlock[0..i]
+#   var target = trainingBlockNext[i]
+#   echo "when input is ", context, " target is ", target
+#[
+The above is done sequentially on the CPU, as a baseline since I can't afford a GPU.
+Below is the batched implementation intended for the GPU. We can (and probably will) use the CPU for this, but Arraymancer lets us target the device at compile time with a flag (-d:cuda), so we don't need the PyTorch-style .to('cuda') calls. More testing is definitely needed.
+]#
+proc getBatch*(split: string, trainingSet: seq[int], validationSet: seq[int]): (Tensor[int], Tensor[int]) =
+  # Pick which split to sample from.
+  var data: seq[int]
+  if split == "train":
+    data = trainingSet
+  else:
+    data = validationSet
+
+  # Random starting offsets for each sequence in the batch.
+  let ix = randomTensor(shape=[batchSize], max=len(data)-blockSize)
+
+  var
+    x: Tensor[int] = [data[0..