Edit History

  • Edit by 엽토군
    Date: 2020.12.12

    Machine learning problem


    Everything ran without errors up to the final model.fit call, but the moment I ran it, an error appeared.
    I think something went wrong somewhere in the processing steps; I would appreciate it if you could point it out.

    import tensorflow as tf
    from tensorflow.keras import layers
    
    from tensorflow.keras import datasets
    (train_x, train_y), (test_x, test_y) = datasets.mnist.load_data()
    
    input = layers.Input([28,28,1])
    net = layers.Conv2D(32,3,1,'SAME',activation = 'relu')(input)
    net = layers.Conv2D(32,3,1,'SAME',activation = 'relu')(net) 
    net = layers.MaxPooling2D(pool_size = (2,2))(net)  
    net = layers.Dropout(0.25)(net)
    
    net = layers.Conv2D(64, (3, 3), padding='SAME',activation = 'relu')(net)
    net = layers.Conv2D(64, (3, 3), padding='SAME',activation = 'relu')(net)
    net = layers.MaxPooling2D(pool_size=(2, 2))(net)
    net = layers.Dropout(0.25)(net)
    
    net = layers.Flatten()(net)
    net = layers.Dense(512,activation = 'relu')(net)
    
    net = layers.Dropout(0.5)(net)
    net = layers.Dense(10,activation = 'softmax')(net)  # num_classes
    
    model = tf.keras.Model(inputs=input, outputs=net, name='Basic_CNN')
    
    ### The model is built as above
    
    model.compile(loss = 'sparse_categorical_crossentropy',
                  optimizer = tf.keras.optimizers.Adam(), 
                  metrics = [tf.keras.metrics.Accuracy()])
    
    ### The model is now compiled; next, feed in the data
    
    train_x.shape,train_y.shape
    
    import numpy as np
    
    train_x = train_x[...,tf.newaxis] 
    test_x = test_x[...,tf.newaxis]
    
    # The pixel values are large, so divide to scale them down (normalize to [0, 1])
    train_x = train_x/255
    test_x = test_x/255
    
    ### The data is ready; now on to training
    
    model.fit(train_x,train_y, batch_size=32, epochs=10,shuffle = True)
    

    That is all of my code.

    Here is the error message it produced:

    ValueError                                Traceback (most recent call last)
    <ipython-input-1-369acf19b763> in <module>
         49 # batch_size: the number of samples used in each training step
         50 
    ---> 51 model.fit(train_x,train_y, batch_size=32, epochs=10,shuffle = True)
    
    ~\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py in _method_wrapper(self, *args, **kwargs)
        106   def _method_wrapper(self, *args, **kwargs):
        107     if not self._in_multi_worker_mode():  # pylint: disable=protected-access
    --> 108       return method(self, *args, **kwargs)
        109 
        110     # Running inside `run_distribute_coordinator` already.
    
    ~\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
       1096                 batch_size=batch_size):
       1097               callbacks.on_train_batch_begin(step)
    -> 1098               tmp_logs = train_function(iterator)
       1099               if data_handler.should_sync:
       1100                 context.async_wait()
    
    ~\anaconda3\lib\site-packages\tensorflow\python\eager\def_function.py in __call__(self, *args, **kwds)
        778       else:
        779         compiler = "nonXla"
    --> 780         result = self._call(*args, **kwds)
        781 
        782       new_tracing_count = self._get_tracing_count()
    
    ~\anaconda3\lib\site-packages\tensorflow\python\eager\def_function.py in _call(self, *args, **kwds)
        821       # This is the first call of __call__, so we have to initialize.
        822       initializers = []
    --> 823       self._initialize(args, kwds, add_initializers_to=initializers)
        824     finally:
        825       # At this point we know that the initialization is complete (or less
    
    ~\anaconda3\lib\site-packages\tensorflow\python\eager\def_function.py in _initialize(self, args, kwds, add_initializers_to)
        694     self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
        695     self._concrete_stateful_fn = (
    --> 696         self._stateful_fn._get_concrete_function_internal_garbage_collected(  # pylint: disable=protected-access
        697             *args, **kwds))
        698 
    
    ~\anaconda3\lib\site-packages\tensorflow\python\eager\function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
       2853       args, kwargs = None, None
       2854     with self._lock:
    -> 2855       graph_function, _, _ = self._maybe_define_function(args, kwargs)
       2856     return graph_function
       2857 
    
    ~\anaconda3\lib\site-packages\tensorflow\python\eager\function.py in _maybe_define_function(self, args, kwargs)
       3211 
       3212       self._function_cache.missed.add(call_context_key)
    -> 3213       graph_function = self._create_graph_function(args, kwargs)
       3214       self._function_cache.primary[cache_key] = graph_function
       3215       return graph_function, args, kwargs
    
    ~\anaconda3\lib\site-packages\tensorflow\python\eager\function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
       3063     arg_names = base_arg_names + missing_arg_names
       3064     graph_function = ConcreteFunction(
    -> 3065         func_graph_module.func_graph_from_py_func(
       3066             self._name,
       3067             self._python_function,
    
    ~\anaconda3\lib\site-packages\tensorflow\python\framework\func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
        984         _, original_func = tf_decorator.unwrap(python_func)
        985 
    --> 986       func_outputs = python_func(*func_args, **func_kwargs)
        987 
        988       # invariant: `func_outputs` contains only Tensors, CompositeTensors,
    
    ~\anaconda3\lib\site-packages\tensorflow\python\eager\def_function.py in wrapped_fn(*args, **kwds)
        598         # __wrapped__ allows AutoGraph to swap in a converted function. We give
        599         # the function a weak reference to itself to avoid a reference cycle.
    --> 600         return weak_wrapped_fn().__wrapped__(*args, **kwds)
        601     weak_wrapped_fn = weakref.ref(wrapped_fn)
        602 
    
    ~\anaconda3\lib\site-packages\tensorflow\python\framework\func_graph.py in wrapper(*args, **kwargs)
        971           except Exception as e:  # pylint:disable=broad-except
        972             if hasattr(e, "ag_error_metadata"):
    --> 973               raise e.ag_error_metadata.to_exception(e)
        974             else:
        975               raise
    
    ValueError: in user code:
    
    C:\Users\a0108\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py:806 train_function  *
            return step_function(self, iterator)
        C:\Users\a0108\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py:796 step_function  **
            outputs = model.distribute_strategy.run(run_step, args=(data,))
        C:\Users\a0108\anaconda3\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:1211 run
            return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
        C:\Users\a0108\anaconda3\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:2585 call_for_each_replica
            return self._call_for_each_replica(fn, args, kwargs)
        C:\Users\a0108\anaconda3\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:2945 _call_for_each_replica
            return fn(*args, **kwargs)
        C:\Users\a0108\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py:789 run_step  **
            outputs = model.train_step(data)
        C:\Users\a0108\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py:759 train_step
            self.compiled_metrics.update_state(y, y_pred, sample_weight)
        C:\Users\a0108\anaconda3\lib\site-packages\tensorflow\python\keras\engine\compile_utils.py:409 update_state
            metric_obj.update_state(y_t, y_p, sample_weight=mask)
        C:\Users\a0108\anaconda3\lib\site-packages\tensorflow\python\keras\utils\metrics_utils.py:90 decorated
            update_op = update_state_fn(*args, **kwargs)
        C:\Users\a0108\anaconda3\lib\site-packages\tensorflow\python\keras\metrics.py:176 update_state_fn
            return ag_update_state(*args, **kwargs)
        C:\Users\a0108\anaconda3\lib\site-packages\tensorflow\python\keras\metrics.py:612 update_state  **
            matches = ag_fn(y_true, y_pred, **self._fn_kwargs)
        C:\Users\a0108\anaconda3\lib\site-packages\tensorflow\python\keras\metrics.py:3208 accuracy  **
            y_pred.shape.assert_is_compatible_with(y_true.shape)
        C:\Users\a0108\anaconda3\lib\site-packages\tensorflow\python\framework\tensor_shape.py:1134 assert_is_compatible_with
            raise ValueError("Shapes %s and %s are incompatible" % (self, other))
    
    
        ValueError: Shapes (32, 10) and (32, 1) are incompatible
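
    For reference, here is a minimal sketch of what the failing check compares, assuming the mismatch comes from pairing tf.keras.metrics.Accuracy() with the sparse integer MNIST labels (the shapes below are the ones reported in the traceback; SparseCategoricalAccuracy is shown only as an illustration):

    import numpy as np
    import tensorflow as tf

    # Sparse MNIST labels arrive as integer class ids with shape (batch, 1),
    # while the Dense(10, activation='softmax') output has shape (batch, 10).
    y_true = np.zeros((32, 1), dtype=np.int64)      # integer class ids
    y_pred = np.zeros((32, 10), dtype=np.float32)   # per-class scores

    # tf.keras.metrics.Accuracy() compares y_true and y_pred element-wise and
    # therefore requires identical shapes; with these inputs it raises the same
    # "Shapes (32, 10) and (32, 1) are incompatible" ValueError.
    # tf.keras.metrics.Accuracy().update_state(y_true, y_pred)  # would raise

    # SparseCategoricalAccuracy takes the argmax over the 10 class scores
    # before comparing against the integer labels, so the shapes are compatible.
    metric = tf.keras.metrics.SparseCategoricalAccuracy()
    metric.update_state(y_true, y_pred)
    print(metric.result().numpy())

    Passing metrics=['accuracy'] in model.compile should behave the same way here, since Keras resolves the string to the sparse variant when the labels are integer class ids and the loss is sparse_categorical_crossentropy.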
    
    
  • Edit by 김현재
    Date: 2020.12.12

    Machine learning problem


    Everything ran without errors up to the final model.fit call, but the moment I ran it, an error appeared.

    I think something went wrong somewhere in the processing steps; I would appreciate it if you could point it out.

    import tensorflow as tf
    from tensorflow.keras import layers
    
    from tensorflow.keras import datasets
    (train_x, train_y), (test_x, test_y) = datasets.mnist.load_data()
    
    input = layers.Input([28,28,1])
    net = layers.Conv2D(32,3,1,'SAME',activation = 'relu')(input)
    net = layers.Conv2D(32,3,1,'SAME',activation = 'relu')(net) 
    net = layers.MaxPooling2D(pool_size = (2,2))(net)  
    net = layers.Dropout(0.25)(net)
    
    net = layers.Conv2D(64, (3, 3), padding='SAME',activation = 'relu')(net)
    net = layers.Conv2D(64, (3, 3), padding='SAME',activation = 'relu')(net)
    net = layers.MaxPooling2D(pool_size=(2, 2))(net)
    net = layers.Dropout(0.25)(net)
    
    net = layers.Flatten()(net)
    net = layers.Dense(512,activation = 'relu')(net)
    
    net = layers.Dropout(0.5)(net)
    net = layers.Dense(10,activation = 'softmax')(net)  # num_classes
    
    model = tf.keras.Model(inputs=input, outputs=net, name='Basic_CNN')
    

    The model is built as above.

    
    model.compile(loss = 'sparse_categorical_crossentropy',
                  optimizer = tf.keras.optimizers.Adam(), 
                  metrics = [tf.keras.metrics.Accuracy()])
    

    The model is now compiled; next, feed in the data.

    train_x.shape,train_y.shape
    
    import numpy as np
    
    train_x = train_x[...,tf.newaxis] 
    test_x = test_x[...,tf.newaxis]
    
    # The pixel values are large, so divide to scale them down (normalize to [0, 1])
    train_x = train_x/255
    test_x = test_x/255
    

    The data is ready; now on to training.

    model.fit(train_x,train_y, batch_size=32, epochs=10,shuffle = True)
    

    That is all of my code.

    Here is the error message it produced:

    ValueError                                Traceback (most recent call last)
    <ipython-input-1-369acf19b763> in <module>
         49 # batch_size: the number of samples used in each training step
         50 
    ---> 51 model.fit(train_x,train_y, batch_size=32, epochs=10,shuffle = True)
    
    ~\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py in _method_wrapper(self, *args, **kwargs)
        106   def _method_wrapper(self, *args, **kwargs):
        107     if not self._in_multi_worker_mode():  # pylint: disable=protected-access
    --> 108       return method(self, *args, **kwargs)
        109 
        110     # Running inside `run_distribute_coordinator` already.
    
    ~\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
       1096                 batch_size=batch_size):
       1097               callbacks.on_train_batch_begin(step)
    -> 1098               tmp_logs = train_function(iterator)
       1099               if data_handler.should_sync:
       1100                 context.async_wait()
    
    ~\anaconda3\lib\site-packages\tensorflow\python\eager\def_function.py in __call__(self, *args, **kwds)
        778       else:
        779         compiler = "nonXla"
    --> 780         result = self._call(*args, **kwds)
        781 
        782       new_tracing_count = self._get_tracing_count()
    
    ~\anaconda3\lib\site-packages\tensorflow\python\eager\def_function.py in _call(self, *args, **kwds)
        821       # This is the first call of __call__, so we have to initialize.
        822       initializers = []
    --> 823       self._initialize(args, kwds, add_initializers_to=initializers)
        824     finally:
        825       # At this point we know that the initialization is complete (or less
    
    ~\anaconda3\lib\site-packages\tensorflow\python\eager\def_function.py in _initialize(self, args, kwds, add_initializers_to)
        694     self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
        695     self._concrete_stateful_fn = (
    --> 696         self._stateful_fn._get_concrete_function_internal_garbage_collected(  # pylint: disable=protected-access
        697             *args, **kwds))
        698 
    
    ~\anaconda3\lib\site-packages\tensorflow\python\eager\function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
       2853       args, kwargs = None, None
       2854     with self._lock:
    -> 2855       graph_function, _, _ = self._maybe_define_function(args, kwargs)
       2856     return graph_function
       2857 
    
    ~\anaconda3\lib\site-packages\tensorflow\python\eager\function.py in _maybe_define_function(self, args, kwargs)
       3211 
       3212       self._function_cache.missed.add(call_context_key)
    -> 3213       graph_function = self._create_graph_function(args, kwargs)
       3214       self._function_cache.primary[cache_key] = graph_function
       3215       return graph_function, args, kwargs
    
    ~\anaconda3\lib\site-packages\tensorflow\python\eager\function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
       3063     arg_names = base_arg_names + missing_arg_names
       3064     graph_function = ConcreteFunction(
    -> 3065         func_graph_module.func_graph_from_py_func(
       3066             self._name,
       3067             self._python_function,
    
    ~\anaconda3\lib\site-packages\tensorflow\python\framework\func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
        984         _, original_func = tf_decorator.unwrap(python_func)
        985 
    --> 986       func_outputs = python_func(*func_args, **func_kwargs)
        987 
        988       # invariant: `func_outputs` contains only Tensors, CompositeTensors,
    
    ~\anaconda3\lib\site-packages\tensorflow\python\eager\def_function.py in wrapped_fn(*args, **kwds)
        598         # __wrapped__ allows AutoGraph to swap in a converted function. We give
        599         # the function a weak reference to itself to avoid a reference cycle.
    --> 600         return weak_wrapped_fn().__wrapped__(*args, **kwds)
        601     weak_wrapped_fn = weakref.ref(wrapped_fn)
        602 
    
    ~\anaconda3\lib\site-packages\tensorflow\python\framework\func_graph.py in wrapper(*args, **kwargs)
        971           except Exception as e:  # pylint:disable=broad-except
        972             if hasattr(e, "ag_error_metadata"):
    --> 973               raise e.ag_error_metadata.to_exception(e)
        974             else:
        975               raise
    
    ValueError: in user code:
    

    C:\Users\a0108\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py:806 train_function  *
            return step_function(self, iterator)
        C:\Users\a0108\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py:796 step_function  **
            outputs = model.distribute_strategy.run(run_step, args=(data,))
        C:\Users\a0108\anaconda3\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:1211 run
            return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
        C:\Users\a0108\anaconda3\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:2585 call_for_each_replica
            return self._call_for_each_replica(fn, args, kwargs)
        C:\Users\a0108\anaconda3\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:2945 _call_for_each_replica
            return fn(*args, **kwargs)
        C:\Users\a0108\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py:789 run_step  **
            outputs = model.train_step(data)
        C:\Users\a0108\anaconda3\lib\site-packages\tensorflow\python\keras\engine\training.py:759 train_step
            self.compiled_metrics.update_state(y, y_pred, sample_weight)
        C:\Users\a0108\anaconda3\lib\site-packages\tensorflow\python\keras\engine\compile_utils.py:409 update_state
            metric_obj.update_state(y_t, y_p, sample_weight=mask)
        C:\Users\a0108\anaconda3\lib\site-packages\tensorflow\python\keras\utils\metrics_utils.py:90 decorated
            update_op = update_state_fn(*args, **kwargs)
        C:\Users\a0108\anaconda3\lib\site-packages\tensorflow\python\keras\metrics.py:176 update_state_fn
            return ag_update_state(*args, **kwargs)
        C:\Users\a0108\anaconda3\lib\site-packages\tensorflow\python\keras\metrics.py:612 update_state  **
            matches = ag_fn(y_true, y_pred, **self._fn_kwargs)
        C:\Users\a0108\anaconda3\lib\site-packages\tensorflow\python\keras\metrics.py:3208 accuracy  **
            y_pred.shape.assert_is_compatible_with(y_true.shape)
        C:\Users\a0108\anaconda3\lib\site-packages\tensorflow\python\framework\tensor_shape.py:1134 assert_is_compatible_with
            raise ValueError("Shapes %s and %s are incompatible" % (self, other))

    ValueError: Shapes (32, 10) and (32, 1) are incompatible