mnagired committed on
Commit
3f48d9a
·
verified ·
1 Parent(s): ad883a0

Update uncertainty/granite-4.0-micro/README.md

Browse files
uncertainty/granite-4.0-micro/README.md CHANGED
@@ -27,7 +27,8 @@ from transformers import AutoTokenizer, AutoModelForCausalLM
27
  from peft import PeftModel
28
 
29
  BASE_NAME = "ibm-granite/granite-4.0-micro"
30
- LORA_NAME = "path/to/uncertainty/lora/adapter"
 
31
  device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
32
 
33
  # Load model
@@ -35,7 +36,8 @@ tokenizer = AutoTokenizer.from_pretrained(BASE_NAME, padding_side="left", trust_
35
  model_base = AutoModelForCausalLM.from_pretrained(BASE_NAME, device_map="auto", torch_dtype=torch.bfloat16)
36
  model_uq = PeftModel.from_pretrained(
37
  AutoModelForCausalLM.from_pretrained(BASE_NAME, device_map="auto", torch_dtype=torch.bfloat16),
38
- LORA_NAME,
 
39
  )
40
 
41
  question = "What is IBM Research?"
 
27
  from peft import PeftModel
28
 
29
  BASE_NAME = "ibm-granite/granite-4.0-micro"
30
+ LORA_REPO = "ibm-granite/granitelib-core-r1.0"
31
+ LORA_SUBFOLDER = "uncertainty/granite-4.0-micro/lora"
32
  device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
33
 
34
  # Load model
 
36
  model_base = AutoModelForCausalLM.from_pretrained(BASE_NAME, device_map="auto", torch_dtype=torch.bfloat16)
37
  model_uq = PeftModel.from_pretrained(
38
  AutoModelForCausalLM.from_pretrained(BASE_NAME, device_map="auto", torch_dtype=torch.bfloat16),
39
+ LORA_REPO,
40
+ subfolder=LORA_SUBFOLDER,
41
  )
42
 
43
  question = "What is IBM Research?"