亚洲香蕉成人av网站在线观看_欧美精品成人91久久久久久久_久久久久久久久久久亚洲_热久久视久久精品18亚洲精品_国产精自产拍久久久久久_亚洲色图国产精品_91精品国产网站_中文字幕欧美日韩精品_国产精品久久久久久亚洲调教_国产精品久久一区_性夜试看影院91社区_97在线观看视频国产_68精品久久久久久欧美_欧美精品在线观看_国产精品一区二区久久精品_欧美老女人bb

首頁 > 網站 > 幫助中心 > 正文

pytorch之inception

2024-07-09 22:43:00
字體:
來源:轉載
供稿:網友

如下所示:

from __future__ import print_function from __future__ import divisionimport torchimport torch.nn as nnimport torch.optim as optimimport numpy as npimport torchvisionfrom torchvision import datasets, models, transformsimport matplotlib.pyplot as pltimport timeimport osimport copyimport argparseprint("PyTorch Version: ",torch.__version__)print("Torchvision Version: ",torchvision.__version__)# Top level data directory. Here we assume the format of the directory conforms #  to the ImageFolder structure

數據集路徑,路徑下的數據集分為訓練集和測試集,也就是train 以及val,train下分為兩類數據1,2,val集同理

# --- Configuration --------------------------------------------------------
# Dataset root. Expects the ImageFolder layout: train/ and val/ subfolders,
# each holding one directory per class (here: classes "1" and "2").
data_dir = "/home/dell/Desktop/data/切割圖像"

# Architecture to fine-tune; choose from
# [resnet, alexnet, vgg, squeezenet, densenet, inception].
model_name = "inception"

# Number of classes in the dataset (two classes: 1 and 2).
num_classes = 2

# Batch size for training — pick a size that fits in memory, otherwise
# training will run out of RAM/VRAM.
batch_size = 32

# Number of epochs to train for.
num_epochs = 1000

# Flag for feature extracting. When False we finetune the whole model;
# when True we only update the reshaped layer params.
feature_extract = True

# Command-line arguments (Linux-style flags) for output/checkpoint paths.
parser = argparse.ArgumentParser(description='PyTorch inception')
parser.add_argument('--outf', default='/home/dell/Desktop/dj/inception/',
                    help='folder to output images and model checkpoints')
parser.add_argument('--net', default='/home/dell/Desktop/dj/inception/inception.pth',
                    help="path to net (to continue training)")
args = parser.parse_args()


訓練函數

def train_model(model, dataloaders, criterion, optimizer, num_epochs=25, is_inception=False):
    """Train ``model`` and keep the weights with the best validation accuracy.

    Args:
        model: network to train (must already be on ``device``).
        dataloaders: dict with 'train' and 'val' DataLoader instances.
        criterion: loss function.
        optimizer: optimizer over the trainable parameters.
        num_epochs: number of epochs to run.
        is_inception: when True, also use Inception v3's auxiliary classifier
            during training (total loss = main + 0.4 * aux).

    Returns:
        (model with best-validation weights loaded, list of per-epoch
        validation accuracies).

    Side effects: appends progress to log.txt / acc.txt, overwrites
    best_acc.txt on every new best, and checkpoints to ``args.outf``
    every 50 epochs.
    """
    since = time.time()
    val_acc_history = []
    best_model_wts = copy.deepcopy(model.state_dict())
    best_acc = 0.0
    print("Start Training, InceptionV3!")
    with open("acc.txt", "w") as f1:
        with open("log.txt", "w") as f2:
            for epoch in range(num_epochs):
                print('Epoch {}/{}'.format(epoch + 1, num_epochs))
                print('*' * 10)
                # Each epoch has a training and a validation phase.
                for phase in ['train', 'val']:
                    if phase == 'train':
                        model.train()   # Set model to training mode
                    else:
                        model.eval()    # Set model to evaluate mode

                    running_loss = 0.0
                    running_corrects = 0

                    # Iterate over data.
                    for inputs, labels in dataloaders[phase]:
                        inputs = inputs.to(device)
                        labels = labels.to(device)

                        # zero the parameter gradients
                        optimizer.zero_grad()

                        # forward; track history only in the training phase
                        with torch.set_grad_enabled(phase == 'train'):
                            if is_inception and phase == 'train':
                                # From https://discuss.pytorch.org/t/how-to-optimize-inception-model-with-auxiliary-classifiers/7958
                                outputs, aux_outputs = model(inputs)
                                loss1 = criterion(outputs, labels)
                                loss2 = criterion(aux_outputs, labels)
                                loss = loss1 + 0.4 * loss2
                            else:
                                outputs = model(inputs)
                                loss = criterion(outputs, labels)

                            _, preds = torch.max(outputs, 1)

                            # backward + optimize only if in training phase
                            if phase == 'train':
                                loss.backward()
                                optimizer.step()

                        # statistics (loss is the batch mean, so re-weight by
                        # batch size to get a dataset-level average later)
                        running_loss += loss.item() * inputs.size(0)
                        running_corrects += torch.sum(preds == labels.data)

                    epoch_loss = running_loss / len(dataloaders[phase].dataset)
                    epoch_acc = running_corrects.double() / len(dataloaders[phase].dataset)

                    print('{} Loss: {:.4f} Acc: {:.4f}'.format(phase, epoch_loss, epoch_acc))
                    f2.write('{} Loss: {:.4f} Acc: {:.4f}'.format(phase, epoch_loss, epoch_acc))
                    # BUG FIX: original wrote the literal characters '/n'
                    # instead of a newline, producing a single-line log file.
                    f2.write('\n')
                    f2.flush()

                    if phase == 'val':
                        # Checkpoint every 50 epochs.
                        if (epoch + 1) % 50 == 0:
                            # print('Saving model......')
                            torch.save(model.state_dict(),
                                       '%s/inception_%03d.pth' % (args.outf, epoch + 1))
                        f1.write("EPOCH=%03d,Accuracy= %.3f%%" % (epoch + 1, epoch_acc))
                        f1.write('\n')  # BUG FIX: was the literal '/n'
                        f1.flush()
                    if phase == 'val' and epoch_acc > best_acc:
                        # BUG FIX: use a context manager so best_acc.txt is
                        # closed even if the write raises (was a bare open()).
                        with open("best_acc.txt", "w") as f3:
                            f3.write("EPOCH=%d,best_acc= %.3f%%" % (epoch + 1, epoch_acc))
                        best_acc = epoch_acc
                        best_model_wts = copy.deepcopy(model.state_dict())
                    if phase == 'val':
                        val_acc_history.append(epoch_acc)

    time_elapsed = time.time() - since
    print('Training complete in {:.0f}m {:.0f}s'.format(time_elapsed // 60, time_elapsed % 60))
    print('Best val Acc: {:4f}'.format(best_acc))

    # load best model weights
    model.load_state_dict(best_model_wts)
    return model, val_acc_history


def set_parameter_requires_grad(model, feature_extracting):
    """Freeze all existing parameters when feature-extracting only."""
    if feature_extracting:
        for param in model.parameters():
            param.requires_grad = False


def initialize_model(model_name, num_classes, feature_extract, use_pretrained=True):
    """Build a torchvision model with its classifier reshaped to num_classes.

    Args:
        model_name: one of resnet / alexnet / vgg / squeezenet / densenet /
            inception.
        num_classes: output size of the new classification layer(s).
        feature_extract: when True, backbone weights are frozen so only the
            newly created layers train.
        use_pretrained: load ImageNet-pretrained weights.

    Returns:
        (model_ft, input_size) — the model and the square input resolution
        its transforms must produce.
    """
    # These are set per-architecture below.
    model_ft = None
    input_size = 0

    if model_name == "resnet":
        # Resnet18
        model_ft = models.resnet18(pretrained=use_pretrained)
        set_parameter_requires_grad(model_ft, feature_extract)
        num_ftrs = model_ft.fc.in_features
        model_ft.fc = nn.Linear(num_ftrs, num_classes)
        input_size = 224
    elif model_name == "alexnet":
        # Alexnet
        model_ft = models.alexnet(pretrained=use_pretrained)
        set_parameter_requires_grad(model_ft, feature_extract)
        num_ftrs = model_ft.classifier[6].in_features
        model_ft.classifier[6] = nn.Linear(num_ftrs, num_classes)
        input_size = 224
    elif model_name == "vgg":
        # VGG11_bn
        model_ft = models.vgg11_bn(pretrained=use_pretrained)
        set_parameter_requires_grad(model_ft, feature_extract)
        num_ftrs = model_ft.classifier[6].in_features
        model_ft.classifier[6] = nn.Linear(num_ftrs, num_classes)
        input_size = 224
    elif model_name == "squeezenet":
        # Squeezenet: classification head is a 1x1 conv, not a Linear layer.
        model_ft = models.squeezenet1_0(pretrained=use_pretrained)
        set_parameter_requires_grad(model_ft, feature_extract)
        model_ft.classifier[1] = nn.Conv2d(512, num_classes, kernel_size=(1, 1), stride=(1, 1))
        model_ft.num_classes = num_classes
        input_size = 224
    elif model_name == "densenet":
        # Densenet
        model_ft = models.densenet121(pretrained=use_pretrained)
        set_parameter_requires_grad(model_ft, feature_extract)
        num_ftrs = model_ft.classifier.in_features
        model_ft.classifier = nn.Linear(num_ftrs, num_classes)
        input_size = 224
    elif model_name == "inception":
        # Inception v3.
        # Be careful: expects (299,299) sized images and has auxiliary output.
        model_ft = models.inception_v3(pretrained=use_pretrained)
        set_parameter_requires_grad(model_ft, feature_extract)
        # Handle the auxiliary net
        num_ftrs = model_ft.AuxLogits.fc.in_features
        model_ft.AuxLogits.fc = nn.Linear(num_ftrs, num_classes)
        # Handle the primary net
        num_ftrs = model_ft.fc.in_features
        model_ft.fc = nn.Linear(num_ftrs, num_classes)
        input_size = 299
    else:
        print("Invalid model name, exiting...")
        exit()

    return model_ft, input_size


# --- Main script ----------------------------------------------------------
# Initialize the model for this run.
model_ft, input_size = initialize_model(model_name, num_classes, feature_extract, use_pretrained=True)
# Print the model we just instantiated
# print(model_ft)

# Data augmentation and normalization for training; just resize + center-crop
# for validation. Normalization constants are the ImageNet channel stats.
data_transforms = {
    'train': transforms.Compose([
        transforms.RandomResizedCrop(input_size),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
    ]),
    'val': transforms.Compose([
        transforms.Resize(input_size),
        transforms.CenterCrop(input_size),
        transforms.ToTensor(),
        transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
    ]),
}

print("Initializing Datasets and Dataloaders...")

# Create training and validation datasets
image_datasets = {x: datasets.ImageFolder(os.path.join(data_dir, x), data_transforms[x])
                  for x in ['train', 'val']}
# Create training and validation dataloaders
dataloaders_dict = {x: torch.utils.data.DataLoader(image_datasets[x], batch_size=batch_size,
                                                   shuffle=True, num_workers=0)
                    for x in ['train', 'val']}

# Detect if we have a GPU available
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

# Optionally resume from an earlier checkpoint (disabled):
'''是否加載之前訓練過的模型
we='/home/dell/Desktop/dj/inception_050.pth'
model_ft.load_state_dict(torch.load(we))
'''

# Send the model to GPU
model_ft = model_ft.to(device)

# Gather the parameters to be optimized in this run: when feature-extracting,
# only the newly initialized (requires_grad=True) layers; otherwise all.
params_to_update = model_ft.parameters()
print("Params to learn:")
if feature_extract:
    params_to_update = []
    for name, param in model_ft.named_parameters():
        if param.requires_grad:
            params_to_update.append(param)
            print("\t", name)  # BUG FIX: original printed the literal "/t"
else:
    for name, param in model_ft.named_parameters():
        if param.requires_grad:
            print("\t", name)  # BUG FIX: original printed the literal "/t"

# Observe that all selected parameters are being optimized
optimizer_ft = optim.SGD(params_to_update, lr=0.001, momentum=0.9)
# Decay LR by a factor of 0.95 every 30 epochs (disabled; would need
# `from torch.optim import lr_scheduler`):
# exp_lr_scheduler = lr_scheduler.StepLR(optimizer_ft, step_size=30, gamma=0.95)

# Setup the loss fxn
criterion = nn.CrossEntropyLoss()

# Train and evaluate
model_ft, hist = train_model(model_ft, dataloaders_dict, criterion, optimizer_ft,
                             num_epochs=num_epochs, is_inception=(model_name == "inception"))

# Disabled example: train the same architecture from random initialization
# and plot both validation-accuracy curves for comparison.
'''#隨機初始化時的訓練程序
# Initialize the non-pretrained version of the model used for this run
scratch_model,_ = initialize_model(model_name, num_classes, feature_extract=False, use_pretrained=False)
scratch_model = scratch_model.to(device)
scratch_optimizer = optim.SGD(scratch_model.parameters(), lr=0.001, momentum=0.9)
scratch_criterion = nn.CrossEntropyLoss()
_,scratch_hist = train_model(scratch_model, dataloaders_dict, scratch_criterion, scratch_optimizer, num_epochs=num_epochs, is_inception=(model_name=="inception"))
# Plot the training curves of validation accuracy vs. number
# of training epochs for the transfer learning method and
# the model trained from scratch
ohist = []
shist = []
ohist = [h.cpu().numpy() for h in hist]
shist = [h.cpu().numpy() for h in scratch_hist]
plt.title("Validation Accuracy vs. Number of Training Epochs")
plt.xlabel("Training Epochs")
plt.ylabel("Validation Accuracy")
plt.plot(range(1,num_epochs+1),ohist,label="Pretrained")
plt.plot(range(1,num_epochs+1),shist,label="Scratch")
plt.ylim((0,1.))
plt.xticks(np.arange(1, num_epochs+1, 1.0))
plt.legend()
plt.show()
'''
發表評論 共有條評論
用戶名: 密碼:
驗證碼: 匿名發表
亚洲香蕉成人av网站在线观看_欧美精品成人91久久久久久久_久久久久久久久久久亚洲_热久久视久久精品18亚洲精品_国产精自产拍久久久久久_亚洲色图国产精品_91精品国产网站_中文字幕欧美日韩精品_国产精品久久久久久亚洲调教_国产精品久久一区_性夜试看影院91社区_97在线观看视频国产_68精品久久久久久欧美_欧美精品在线观看_国产精品一区二区久久精品_欧美老女人bb
国产一区二区三区精品久久久| 欧美一级视频免费在线观看| 亚洲综合一区二区不卡| 精品爽片免费看久久| 久久免费福利视频| 成人观看高清在线观看免费| 成人黄色免费在线观看| 亚洲国产三级网| 国产成人精品av在线| 国产亚洲精品va在线观看| 精品福利视频导航| 青青青国产精品一区二区| 欧美精品少妇videofree| 精品国产欧美一区二区三区成人| 色综合伊人色综合网站| 亚洲一区二区少妇| 久久久久久久999精品视频| 疯狂欧美牲乱大交777| 91久久综合亚洲鲁鲁五月天| www.日韩.com| 懂色aⅴ精品一区二区三区蜜月| 亚洲少妇中文在线| 国产69精品99久久久久久宅男| 国产精品网红福利| 91青草视频久久| 亚洲电影免费观看高清完整版| 在线播放精品一区二区三区| 久久久91精品国产| 欧美精品18videos性欧| 国产精品亚洲综合天堂夜夜| 色视频www在线播放国产成人| 在线国产精品视频| 久久精品国产综合| 亚洲国产成人久久| 久久伊人精品一区二区三区| 色悠久久久久综合先锋影音下载| 国产精品久久久久77777| 黄色一区二区三区| 丁香五六月婷婷久久激情| 亚洲综合中文字幕在线观看| 亚洲男人天天操| 亚洲xxx视频| 久久伊人精品一区二区三区| 精品久久香蕉国产线看观看亚洲| 精品香蕉一区二区三区| 亚洲综合在线播放| 欧美黄网免费在线观看| 欧美一级免费看| 91在线观看免费高清| 久久精品国产91精品亚洲| 欧美另类交人妖| 国产网站欧美日韩免费精品在线观看| 亚洲国产精品va在线看黑人| 精品亚洲永久免费精品| 三级精品视频久久久久| 欧美日韩成人精品| 欧美成人激情在线| 91av中文字幕| 欧美视频精品一区| 欧美日韩中文在线| 久热在线中文字幕色999舞| 亚洲一区二区久久久久久久| 欧美日韩亚洲一区二区| 成人国内精品久久久久一区| 久久99精品久久久久久琪琪| 国产精品第2页| 国产精品久久久久久久天堂| 欧美亚洲在线播放| 尤物九九久久国产精品的特点| 久久久久久免费精品| 亚洲精品黄网在线观看| 日韩最新av在线| 日韩欧美在线视频观看| 亚洲性夜色噜噜噜7777| 国产精品毛片a∨一区二区三区|国| 中文字幕精品www乱入免费视频| 日韩黄色高清视频| 中文字幕亚洲无线码在线一区| 黑人巨大精品欧美一区二区三区| 国产美女久久精品| 国产精品高潮呻吟视频| 最好看的2019的中文字幕视频| 中文字幕日韩精品在线| 亚洲精品久久久久久久久久久久| 国产精品一区二区三区久久| 国产精品电影久久久久电影网| 国产精品高清在线观看| 亚洲精品午夜精品| 久久久久久久91| 国产成人福利夜色影视| 亚洲福利视频专区| 日韩麻豆第一页| 亚洲欧美中文在线视频| 久久影院免费观看| 亚洲激情视频在线| 欧美理论片在线观看| 性色av一区二区三区在线观看| 成人h片在线播放免费网站| 亚洲国产精品va| 日韩精品免费在线观看| 欧美性视频网站| 欧美极度另类性三渗透| 国产一区二区动漫| 久久久久久久久综合| 狠狠色狠狠色综合日日五| 91亚洲精品一区| 亚洲激情视频网站| 美女黄色丝袜一区| 性色av一区二区三区在线观看| 一区二区三区无码高清视频| 国产精品国产亚洲伊人久久| 国产剧情日韩欧美| 久久97精品久久久久久久不卡| 久久国产精品影片| 97国产在线视频| 日韩美女中文字幕| 狠狠做深爱婷婷久久综合一区| 久久免费在线观看| 2019中文字幕免费视频| 日韩高清av一区二区三区| 国产盗摄xxxx视频xxx69| 国产精品久久久久久久7电影| 久久亚洲一区二区三区四区五区高| 亚洲欧美国产精品专区久久| 欧美激情高清视频| 久久久精品免费| 国产香蕉精品视频一区二区三区| 97精品伊人久久久大香线蕉| 欧美电影免费播放| 欧美在线一级视频| 国产精品视频资源| 国产精品久久视频| 亚洲一区二区黄| 欧洲亚洲免费视频| 视频直播国产精品| 欧美激情一级二级| 亚洲欧洲在线视频| 4438全国成人免费| 国产精品一香蕉国产线看观看| 欧美性色xo影院| 欧美激情精品久久久久久蜜臀| 成人免费看黄网站| 亚洲欧美日韩一区二区三区在线| 欧美最猛性xxxxx亚洲精品| 亚洲成人av中文字幕| 久久久精品在线观看| 国产91精品最新在线播放| 欧美精品18videosex性欧美| 欧美黑人巨大xxx极品| 青青草原一区二区| 欧美在线视频一区二区| 国产成人精品久久久| 永久免费精品影视网站| 韩国国内大量揄拍精品视频| 欧美成人午夜激情视频| 久久影视电视剧免费网站清宫辞电视| 亚洲色图美腿丝袜| 91成人在线播放| 国产精品伦子伦免费视频| 亚洲欧洲在线看| 
97久久伊人激情网| 国产精品高潮呻吟久久av野狼| 91久久久久久| 国产91精品久|