)]}'
{"/COMMIT_MSG":[{"author":{"_account_id":8688,"name":"Alexis Lee","email":"openstack@lxsli.co.uk","username":"lxsli"},"change_message_id":"5d291b06bd74d20ee20ce670af4eb8e4550c0bae","unresolved":false,"context_lines":[{"line_number":13,"context_line":"neutronv2.API.allocate_for_instance calls that both first queries the"},{"line_number":14,"context_line":"device_id of the port from neutron and both see that it is empty so both"},{"line_number":15,"context_line":"update the device_id with the current instance.uuid."},{"line_number":16,"context_line":"This race condition cannot be totally avoid with nova modification as"},{"line_number":17,"context_line":"the solution would need an atomic port update API with test-and-set semantic"},{"line_number":18,"context_line":"in neutron."},{"line_number":19,"context_line":"So this patch implements a way to detect that parallel update happened"},{"line_number":20,"context_line":"on the port and abort the build which lost the ownership of a port due to"},{"line_number":21,"context_line":"this race condition."}],"source_content_type":"text/x-gerrit-commit-message","patch_set":2,"id":"9a1a9d01_635f2d0f","line":18,"range":{"start_line":16,"start_character":0,"end_line":18,"end_character":11},"updated":"2015-09-24 16:10:42.000000000","message":"so where\u0027s the Neutron patch? Not to seem ungrateful for your work here but if this is best fixed in Neutron, let\u0027s do that.","commit_id":"d0cb84f03fb034096fda2353d2e28ca3236c7ab7"},{"author":{"_account_id":8688,"name":"Alexis Lee","email":"openstack@lxsli.co.uk","username":"lxsli"},"change_message_id":"1feaa428115f032d64c8f48aaaa9b1f8c436eb83","unresolved":false,"context_lines":[{"line_number":13,"context_line":"neutronv2.API.allocate_for_instance calls that both first queries the"},{"line_number":14,"context_line":"device_id of the port from neutron and both see that it is empty so both"},{"line_number":15,"context_line":"update the device_id with the current instance.uuid."},{"line_number":16,"context_line":"This race condition cannot be totally avoid with nova modification as"},{"line_number":17,"context_line":"the solution would need an atomic port update API with test-and-set semantic"},{"line_number":18,"context_line":"in neutron."},{"line_number":19,"context_line":"So this patch implements a way to detect that parallel update happened"},{"line_number":20,"context_line":"on the port and abort the build which lost the ownership of a port due to"},{"line_number":21,"context_line":"this race condition."}],"source_content_type":"text/x-gerrit-commit-message","patch_set":2,"id":"9a1a9d01_c792654b","line":18,"range":{"start_line":16,"start_character":0,"end_line":18,"end_character":11},"in_reply_to":"9a1a9d01_3bdfc36a","updated":"2015-10-05 10:16:21.000000000","message":"OK can you add the bug # to the commit message please?","commit_id":"d0cb84f03fb034096fda2353d2e28ca3236c7ab7"},{"author":{"_account_id":9708,"name":"Balazs Gibizer","display_name":"gibi","email":"gibizer@gmail.com","username":"gibi"},"change_message_id":"b01102f53f3540283af403a9a127896a6351368e","unresolved":false,"context_lines":[{"line_number":13,"context_line":"neutronv2.API.allocate_for_instance calls that both first queries the"},{"line_number":14,"context_line":"device_id of the port from neutron and both see that it is empty so both"},{"line_number":15,"context_line":"update the device_id with the current instance.uuid."},{"line_number":16,"context_line":"This race condition cannot be totally avoid with nova modification 
as"},{"line_number":17,"context_line":"the solution would need an atomic port update API with test-and-set semantic"},{"line_number":18,"context_line":"in neutron."},{"line_number":19,"context_line":"So this patch implements a way to detect that parallel update happened"},{"line_number":20,"context_line":"on the port and abort the build which lost the ownership of a port due to"},{"line_number":21,"context_line":"this race condition."}],"source_content_type":"text/x-gerrit-commit-message","patch_set":2,"id":"9a1a9d01_3bdfc36a","line":18,"range":{"start_line":16,"start_character":0,"end_line":18,"end_character":11},"in_reply_to":"9a1a9d01_635f2d0f","updated":"2015-09-28 09:36:58.000000000","message":"Solving this needs a major API rework in neutron. I guess this won\u0027t happen in the near future. I have no deep neutron knowledge so I don\u0027t feel the power to propose the this big change in neutron. However I opened a neutron bug so that the community can be aware of the problem: https://bugs.launchpad.net/neutron/+bug/1500365\n\nI think this nova commit should not be blocked with the reason that the root of the problem needs to be solved neutron as this commit can help avoiding the issue in nova until the neutron is fixed.","commit_id":"d0cb84f03fb034096fda2353d2e28ca3236c7ab7"},{"author":{"_account_id":9708,"name":"Balazs Gibizer","display_name":"gibi","email":"gibizer@gmail.com","username":"gibi"},"change_message_id":"fd24ca878a113742dd644171eb00a8968b14d5cc","unresolved":false,"context_lines":[{"line_number":13,"context_line":"neutronv2.API.allocate_for_instance calls that both first queries the"},{"line_number":14,"context_line":"device_id of the port from neutron and both see that it is empty so both"},{"line_number":15,"context_line":"update the device_id with the current instance.uuid."},{"line_number":16,"context_line":"This race condition cannot be totally avoid with nova modification as"},{"line_number":17,"context_line":"the solution would need an atomic port update API with test-and-set semantic"},{"line_number":18,"context_line":"in neutron."},{"line_number":19,"context_line":"So this patch implements a way to detect that parallel update happened"},{"line_number":20,"context_line":"on the port and abort the build which lost the ownership of a port due to"},{"line_number":21,"context_line":"this race condition."}],"source_content_type":"text/x-gerrit-commit-message","patch_set":2,"id":"7a2fa921_500e4d35","line":18,"range":{"start_line":16,"start_character":0,"end_line":18,"end_character":11},"in_reply_to":"9a1a9d01_c792654b","updated":"2015-10-06 12:10:02.000000000","message":"Good point, I added it.","commit_id":"d0cb84f03fb034096fda2353d2e28ca3236c7ab7"},{"author":{"_account_id":17973,"name":"Ludovic Beliveau","email":"lbeliveau@gmail.com","username":"lbeliveau"},"change_message_id":"c2dd9d53ae8ba4d31d66ef5eb01fb9c03546122b","unresolved":false,"context_lines":[{"line_number":13,"context_line":"neutronv2.API.allocate_for_instance calls that both first queries the"},{"line_number":14,"context_line":"device_id of the port from neutron and both see that it is empty so both"},{"line_number":15,"context_line":"update the device_id with the current instance.uuid."},{"line_number":16,"context_line":"This race condition cannot be totally avoid with nova modification as"},{"line_number":17,"context_line":"the solution would need an atomic port update API with test-and-set semantic"},{"line_number":18,"context_line":"in neutron."},{"line_number":19,"context_line":"So this patch 
implements a way to detect that parallel update happened"}],"source_content_type":"text/x-gerrit-commit-message","patch_set":7,"id":"7af24918_89d52844","line":16,"updated":"2016-03-04 16:49:01.000000000","message":"s/avoid/avoided/","commit_id":"e28175729be10c5ce64422e74651f4473ba5a9a1"},{"author":{"_account_id":9708,"name":"Balazs Gibizer","display_name":"gibi","email":"gibizer@gmail.com","username":"gibi"},"change_message_id":"e56145a96063e873dbbec98b3ab3cef66c010c83","unresolved":false,"context_lines":[{"line_number":13,"context_line":"neutronv2.API.allocate_for_instance calls that both first queries the"},{"line_number":14,"context_line":"device_id of the port from neutron and both see that it is empty so both"},{"line_number":15,"context_line":"update the device_id with the current instance.uuid."},{"line_number":16,"context_line":"This race condition cannot be totally avoid with nova modification as"},{"line_number":17,"context_line":"the solution would need an atomic port update API with test-and-set semantic"},{"line_number":18,"context_line":"in neutron."},{"line_number":19,"context_line":"So this patch implements a way to detect that parallel update happened"}],"source_content_type":"text/x-gerrit-commit-message","patch_set":7,"id":"5aef4532_69aff0e3","line":16,"in_reply_to":"7af24918_89d52844","updated":"2016-03-07 10:25:48.000000000","message":"Done","commit_id":"e28175729be10c5ce64422e74651f4473ba5a9a1"},{"author":{"_account_id":17973,"name":"Ludovic Beliveau","email":"lbeliveau@gmail.com","username":"lbeliveau"},"change_message_id":"c2dd9d53ae8ba4d31d66ef5eb01fb9c03546122b","unresolved":false,"context_lines":[{"line_number":16,"context_line":"This race condition cannot be totally avoid with nova modification as"},{"line_number":17,"context_line":"the solution would need an atomic port update API with test-and-set semantic"},{"line_number":18,"context_line":"in neutron."},{"line_number":19,"context_line":"So this patch implements a way to detect that parallel update happened"},{"line_number":20,"context_line":"on the port and abort the build which lost the ownership of a port due to"},{"line_number":21,"context_line":"this race condition."},{"line_number":22,"context_line":""}],"source_content_type":"text/x-gerrit-commit-message","patch_set":7,"id":"7af24918_e93f8c84","line":19,"updated":"2016-03-04 16:49:01.000000000","message":"d/happened/","commit_id":"e28175729be10c5ce64422e74651f4473ba5a9a1"},{"author":{"_account_id":9708,"name":"Balazs Gibizer","display_name":"gibi","email":"gibizer@gmail.com","username":"gibi"},"change_message_id":"e56145a96063e873dbbec98b3ab3cef66c010c83","unresolved":false,"context_lines":[{"line_number":16,"context_line":"This race condition cannot be totally avoid with nova modification as"},{"line_number":17,"context_line":"the solution would need an atomic port update API with test-and-set semantic"},{"line_number":18,"context_line":"in neutron."},{"line_number":19,"context_line":"So this patch implements a way to detect that parallel update happened"},{"line_number":20,"context_line":"on the port and abort the build which lost the ownership of a port due to"},{"line_number":21,"context_line":"this race condition."},{"line_number":22,"context_line":""}],"source_content_type":"text/x-gerrit-commit-message","patch_set":7,"id":"5aef4532_a9b9f830","line":19,"in_reply_to":"7af24918_e93f8c84","updated":"2016-03-07 
10:25:48.000000000","message":"Done","commit_id":"e28175729be10c5ce64422e74651f4473ba5a9a1"}],"nova/api/openstack/compute/attach_interfaces.py":[{"author":{"_account_id":782,"name":"John Garbutt","email":"john@johngarbutt.com","username":"johngarbutt"},"change_message_id":"986af5e0cea3f2a3305c61e13f5382f2bd55fd6d","unresolved":false,"context_lines":[{"line_number":128,"context_line":"        except (exception.InstanceIsLocked,"},{"line_number":129,"context_line":"                exception.FixedIpAlreadyInUse,"},{"line_number":130,"context_line":"                exception.PortInUse) as e:"},{"line_number":131,"context_line":"            raise exc.HTTPConflict(explanation\u003de.format_message())"},{"line_number":132,"context_line":"        except (exception.PortNotFound,"},{"line_number":133,"context_line":"                exception.NetworkNotFound) as e:"},{"line_number":134,"context_line":"            raise exc.HTTPNotFound(explanation\u003de.format_message())"}],"source_content_type":"text/x-python","patch_set":16,"id":"5a74a57a_e307237f","line":131,"updated":"2016-11-24 15:45:49.000000000","message":"Conflict is technically more accurate for this case I feel.","commit_id":"27bcb643337913d2aab3c58f1f1a9645031bff20"}],"nova/compute/manager.py":[{"author":{"_account_id":782,"name":"John Garbutt","email":"john@johngarbutt.com","username":"johngarbutt"},"change_message_id":"986af5e0cea3f2a3305c61e13f5382f2bd55fd6d","unresolved":false,"context_lines":[{"line_number":1932,"context_line":"            raise exception.RescheduledException("},{"line_number":1933,"context_line":"                    instance_uuid\u003dinstance.uuid, reason\u003de.format_message())"},{"line_number":1934,"context_line":"        except (exception.BuildAbortException,"},{"line_number":1935,"context_line":"                exception.PortsConflict) as e:"},{"line_number":1936,"context_line":"            with excutils.save_and_reraise_exception():"},{"line_number":1937,"context_line":"                LOG.debug(e.format_message(), instance\u003dinstance)"},{"line_number":1938,"context_line":"                self._notify_about_instance_usage(context, instance,"}],"source_content_type":"text/x-python","patch_set":16,"id":"5a74a57a_23ebdbb1","line":1935,"range":{"start_line":1935,"start_character":26,"end_line":1935,"end_character":39},"updated":"2016-11-24 15:45:49.000000000","message":"Why couldn\u0027t this be PortInUse again?","commit_id":"27bcb643337913d2aab3c58f1f1a9645031bff20"}],"nova/exception.py":[{"author":{"_account_id":6167,"name":"Ken\u0027ichi Ohmichi","email":"ken1ohmichi@gmail.com","username":"oomichi"},"change_message_id":"713065055d2903a03b41e88d1470dbb076c3910d","unresolved":false,"context_lines":[{"line_number":1597,"context_line":"    msg_fmt \u003d _(\"Build of instance %(instance_uuid)s aborted: %(reason)s\")"},{"line_number":1598,"context_line":""},{"line_number":1599,"context_line":""},{"line_number":1600,"context_line":"class PortConflict(NovaException):"},{"line_number":1601,"context_line":"    msg_fmt \u003d _(\"The ports %(ports)s are requested for the instance \""},{"line_number":1602,"context_line":"                \"%(instance_uuid)s but already in use\")"},{"line_number":1603,"context_line":""}],"source_content_type":"text/x-python","patch_set":15,"id":"1ac06dbe_0b751363","line":1600,"range":{"start_line":1600,"start_character":6,"end_line":1600,"end_character":10},"updated":"2016-08-23 17:05:50.000000000","message":"nit: PortsConflict according to the following 
message?","commit_id":"b1aee7f06b1716d41408c885e06a7fecc8c26948"},{"author":{"_account_id":9708,"name":"Balazs Gibizer","display_name":"gibi","email":"gibizer@gmail.com","username":"gibi"},"change_message_id":"3280fd999175952a0dac1cd76d408cf22e1bfbb1","unresolved":false,"context_lines":[{"line_number":1597,"context_line":"    msg_fmt \u003d _(\"Build of instance %(instance_uuid)s aborted: %(reason)s\")"},{"line_number":1598,"context_line":""},{"line_number":1599,"context_line":""},{"line_number":1600,"context_line":"class PortConflict(NovaException):"},{"line_number":1601,"context_line":"    msg_fmt \u003d _(\"The ports %(ports)s are requested for the instance \""},{"line_number":1602,"context_line":"                \"%(instance_uuid)s but already in use\")"},{"line_number":1603,"context_line":""}],"source_content_type":"text/x-python","patch_set":15,"id":"1ac06dbe_89f19d6d","line":1600,"range":{"start_line":1600,"start_character":6,"end_line":1600,"end_character":10},"in_reply_to":"1ac06dbe_0b751363","updated":"2016-08-24 12:58:04.000000000","message":"Done","commit_id":"b1aee7f06b1716d41408c885e06a7fecc8c26948"}],"nova/network/neutronv2/api.py":[{"author":{"_account_id":6167,"name":"Ken\u0027ichi Ohmichi","email":"ken1ohmichi@gmail.com","username":"oomichi"},"change_message_id":"8262b4622b8edddde13cf991f27d5658685994f7","unresolved":false,"context_lines":[{"line_number":651,"context_line":"            update_cells\u003dTrue)"},{"line_number":652,"context_line":""},{"line_number":653,"context_line":"        # NOTE(gibi): get_instance_nw_info will re-query the port list from"},{"line_number":654,"context_line":"        # neutron so if there was a parallel port.update from another"},{"line_number":655,"context_line":"        # nova-compute then the returned nw_info does not have all the ports"},{"line_number":656,"context_line":"        # that the original request so we can detect the race here"},{"line_number":657,"context_line":"        still_owned_ports \u003d [vif[\u0027id\u0027] for vif in nw_info]"}],"source_content_type":"text/x-python","patch_set":6,"id":"7af24918_28a4f9b7","line":654,"range":{"start_line":654,"start_character":10,"end_line":654,"end_character":20},"updated":"2016-03-03 23:56:27.000000000","message":"nit: s/neutron so/neutron. So/","commit_id":"4dc9f389196f118eda00fde42c512448f14b1643"},{"author":{"_account_id":6167,"name":"Ken\u0027ichi Ohmichi","email":"ken1ohmichi@gmail.com","username":"oomichi"},"change_message_id":"8262b4622b8edddde13cf991f27d5658685994f7","unresolved":false,"context_lines":[{"line_number":653,"context_line":"        # NOTE(gibi): get_instance_nw_info will re-query the port list from"},{"line_number":654,"context_line":"        # neutron so if there was a parallel port.update from another"},{"line_number":655,"context_line":"        # nova-compute then the returned nw_info does not have all the ports"},{"line_number":656,"context_line":"        # that the original request so we can detect the race here"},{"line_number":657,"context_line":"        still_owned_ports \u003d [vif[\u0027id\u0027] for vif in nw_info]"},{"line_number":658,"context_line":"        lost_ports \u003d set(ports_in_requested_order) - set(still_owned_ports)"},{"line_number":659,"context_line":"        if lost_ports:"}],"source_content_type":"text/x-python","patch_set":6,"id":"7af24918_88938d5a","line":656,"range":{"start_line":656,"start_character":28,"end_line":656,"end_character":38},"updated":"2016-03-03 23:56:27.000000000","message":"nit: s/request so/request. 
So/","commit_id":"4dc9f389196f118eda00fde42c512448f14b1643"},{"author":{"_account_id":17973,"name":"Ludovic Beliveau","email":"lbeliveau@gmail.com","username":"lbeliveau"},"change_message_id":"c2dd9d53ae8ba4d31d66ef5eb01fb9c03546122b","unresolved":false,"context_lines":[{"line_number":675,"context_line":"        # NOTE(gibi): get_instance_nw_info will re-query the port list from"},{"line_number":676,"context_line":"        # neutron. So if there was a parallel port.update from another"},{"line_number":677,"context_line":"        # nova-compute then the returned nw_info does not have all the ports"},{"line_number":678,"context_line":"        # that the original request. So we can detect the race here"},{"line_number":679,"context_line":"        still_owned_ports \u003d [vif[\u0027id\u0027] for vif in nw_info]"},{"line_number":680,"context_line":"        lost_ports \u003d set(ports_in_requested_order) - set(still_owned_ports)"},{"line_number":681,"context_line":"        if lost_ports:"}],"source_content_type":"text/x-python","patch_set":7,"id":"7af24918_2975f462","line":678,"updated":"2016-03-04 16:49:01.000000000","message":"Please add a FIXME() and mention about the neutron bug.  This should be removed when it is fixed in neutron.","commit_id":"e28175729be10c5ce64422e74651f4473ba5a9a1"},{"author":{"_account_id":17973,"name":"Ludovic Beliveau","email":"lbeliveau@gmail.com","username":"lbeliveau"},"change_message_id":"281afe3eaf5dd0295a8e03749b1640068d90a7c8","unresolved":false,"context_lines":[{"line_number":675,"context_line":"        # NOTE(gibi): get_instance_nw_info will re-query the port list from"},{"line_number":676,"context_line":"        # neutron. So if there was a parallel port.update from another"},{"line_number":677,"context_line":"        # nova-compute then the returned nw_info does not have all the ports"},{"line_number":678,"context_line":"        # that the original request. So we can detect the race here"},{"line_number":679,"context_line":"        still_owned_ports \u003d [vif[\u0027id\u0027] for vif in nw_info]"},{"line_number":680,"context_line":"        lost_ports \u003d set(ports_in_requested_order) - set(still_owned_ports)"},{"line_number":681,"context_line":"        if lost_ports:"}],"source_content_type":"text/x-python","patch_set":7,"id":"5aef4532_b4af83e2","line":678,"in_reply_to":"5aef4532_265877fa","updated":"2016-03-07 14:58:27.000000000","message":"I\u0027ve read the discussion on the neutron bug.  Looks like they will discuss it.\n\nI think if this is ever to be fixed in neutron, we\u0027ll need a different fix in nova to avoid this situation entirely where we create multiple instance with same port ID.  But that would require more work.","commit_id":"e28175729be10c5ce64422e74651f4473ba5a9a1"},{"author":{"_account_id":9708,"name":"Balazs Gibizer","display_name":"gibi","email":"gibizer@gmail.com","username":"gibi"},"change_message_id":"feb38b31f740d6139e109e57d649ce1020464f16","unresolved":false,"context_lines":[{"line_number":675,"context_line":"        # NOTE(gibi): get_instance_nw_info will re-query the port list from"},{"line_number":676,"context_line":"        # neutron. So if there was a parallel port.update from another"},{"line_number":677,"context_line":"        # nova-compute then the returned nw_info does not have all the ports"},{"line_number":678,"context_line":"        # that the original request. 
So we can detect the race here"},{"line_number":679,"context_line":"        still_owned_ports \u003d [vif[\u0027id\u0027] for vif in nw_info]"},{"line_number":680,"context_line":"        lost_ports \u003d set(ports_in_requested_order) - set(still_owned_ports)"},{"line_number":681,"context_line":"        if lost_ports:"}],"source_content_type":"text/x-python","patch_set":7,"id":"5aef4532_e52633b9","line":678,"in_reply_to":"5aef4532_b4af83e2","updated":"2016-03-08 16:41:56.000000000","message":"I agree that if this will be fixed in neutron then we have to adapt Nova to that fix.","commit_id":"e28175729be10c5ce64422e74651f4473ba5a9a1"},{"author":{"_account_id":9708,"name":"Balazs Gibizer","display_name":"gibi","email":"gibizer@gmail.com","username":"gibi"},"change_message_id":"e56145a96063e873dbbec98b3ab3cef66c010c83","unresolved":false,"context_lines":[{"line_number":675,"context_line":"        # NOTE(gibi): get_instance_nw_info will re-query the port list from"},{"line_number":676,"context_line":"        # neutron. So if there was a parallel port.update from another"},{"line_number":677,"context_line":"        # nova-compute then the returned nw_info does not have all the ports"},{"line_number":678,"context_line":"        # that the original request. So we can detect the race here"},{"line_number":679,"context_line":"        still_owned_ports \u003d [vif[\u0027id\u0027] for vif in nw_info]"},{"line_number":680,"context_line":"        lost_ports \u003d set(ports_in_requested_order) - set(still_owned_ports)"},{"line_number":681,"context_line":"        if lost_ports:"}],"source_content_type":"text/x-python","patch_set":7,"id":"5aef4532_265877fa","line":678,"in_reply_to":"7af24918_2975f462","updated":"2016-03-07 10:25:48.000000000","message":"Done. But please note that it is very unlikely that this will ever fixed in Neutron based on the PTL response in the bug report.","commit_id":"e28175729be10c5ce64422e74651f4473ba5a9a1"},{"author":{"_account_id":17973,"name":"Ludovic Beliveau","email":"lbeliveau@gmail.com","username":"lbeliveau"},"change_message_id":"c2dd9d53ae8ba4d31d66ef5eb01fb9c03546122b","unresolved":false,"context_lines":[{"line_number":676,"context_line":"        # neutron. So if there was a parallel port.update from another"},{"line_number":677,"context_line":"        # nova-compute then the returned nw_info does not have all the ports"},{"line_number":678,"context_line":"        # that the original request. So we can detect the race here"},{"line_number":679,"context_line":"        still_owned_ports \u003d [vif[\u0027id\u0027] for vif in nw_info]"},{"line_number":680,"context_line":"        lost_ports \u003d set(ports_in_requested_order) - set(still_owned_ports)"},{"line_number":681,"context_line":"        if lost_ports:"},{"line_number":682,"context_line":"            self._unbind_ports(context,"}],"source_content_type":"text/x-python","patch_set":7,"id":"7af24918_a99744ea","line":679,"updated":"2016-03-04 16:49:01.000000000","message":"Nit: I would removed the \"still\" in the variable name.  This can be confusing.","commit_id":"e28175729be10c5ce64422e74651f4473ba5a9a1"},{"author":{"_account_id":9708,"name":"Balazs Gibizer","display_name":"gibi","email":"gibizer@gmail.com","username":"gibi"},"change_message_id":"e56145a96063e873dbbec98b3ab3cef66c010c83","unresolved":false,"context_lines":[{"line_number":676,"context_line":"        # neutron. 
So if there was a parallel port.update from another"},{"line_number":677,"context_line":"        # nova-compute then the returned nw_info does not have all the ports"},{"line_number":678,"context_line":"        # that the original request. So we can detect the race here"},{"line_number":679,"context_line":"        still_owned_ports \u003d [vif[\u0027id\u0027] for vif in nw_info]"},{"line_number":680,"context_line":"        lost_ports \u003d set(ports_in_requested_order) - set(still_owned_ports)"},{"line_number":681,"context_line":"        if lost_ports:"},{"line_number":682,"context_line":"            self._unbind_ports(context,"}],"source_content_type":"text/x-python","patch_set":7,"id":"5aef4532_06145bf3","line":679,"in_reply_to":"7af24918_a99744ea","updated":"2016-03-07 10:25:48.000000000","message":"Done","commit_id":"e28175729be10c5ce64422e74651f4473ba5a9a1"},{"author":{"_account_id":17973,"name":"Ludovic Beliveau","email":"lbeliveau@gmail.com","username":"lbeliveau"},"change_message_id":"c2dd9d53ae8ba4d31d66ef5eb01fb9c03546122b","unresolved":false,"context_lines":[{"line_number":691,"context_line":"            LOG.error(msg)"},{"line_number":692,"context_line":"            raise exception.BuildAbortException(instance_uuid\u003dinstance.uuid,"},{"line_number":693,"context_line":"                                                reason\u003dmsg)"},{"line_number":694,"context_line":""},{"line_number":695,"context_line":"        # NOTE(danms): Only return info about ports we created in this run."},{"line_number":696,"context_line":"        # In the initial allocation case, this will be everything we created,"},{"line_number":697,"context_line":"        # and in later runs will only be what was created that time. Thus,"}],"source_content_type":"text/x-python","patch_set":7,"id":"7af24918_e9e5ccdb","line":694,"updated":"2016-03-04 16:49:01.000000000","message":"I\u0027m wondering if this logic should be moved in self.get_instance_nw_info() so that the race condition might be caught in similar cases (like migration, etc.).  What do you think ?","commit_id":"e28175729be10c5ce64422e74651f4473ba5a9a1"},{"author":{"_account_id":9708,"name":"Balazs Gibizer","display_name":"gibi","email":"gibizer@gmail.com","username":"gibi"},"change_message_id":"feb38b31f740d6139e109e57d649ce1020464f16","unresolved":false,"context_lines":[{"line_number":691,"context_line":"            LOG.error(msg)"},{"line_number":692,"context_line":"            raise exception.BuildAbortException(instance_uuid\u003dinstance.uuid,"},{"line_number":693,"context_line":"                                                reason\u003dmsg)"},{"line_number":694,"context_line":""},{"line_number":695,"context_line":"        # NOTE(danms): Only return info about ports we created in this run."},{"line_number":696,"context_line":"        # In the initial allocation case, this will be everything we created,"},{"line_number":697,"context_line":"        # and in later runs will only be what was created that time. Thus,"}],"source_content_type":"text/x-python","patch_set":7,"id":"5aef4532_02487586","line":694,"in_reply_to":"5aef4532_34d09353","updated":"2016-03-08 16:41:56.000000000","message":"_update_port_binding_for_instance does not change the ownership of the port (does not change the device_id) so it cannot race the same way as the original problem. 
This function only modifies the ports that is owned by the given instance.","commit_id":"e28175729be10c5ce64422e74651f4473ba5a9a1"},{"author":{"_account_id":17973,"name":"Ludovic Beliveau","email":"lbeliveau@gmail.com","username":"lbeliveau"},"change_message_id":"281afe3eaf5dd0295a8e03749b1640068d90a7c8","unresolved":false,"context_lines":[{"line_number":691,"context_line":"            LOG.error(msg)"},{"line_number":692,"context_line":"            raise exception.BuildAbortException(instance_uuid\u003dinstance.uuid,"},{"line_number":693,"context_line":"                                                reason\u003dmsg)"},{"line_number":694,"context_line":""},{"line_number":695,"context_line":"        # NOTE(danms): Only return info about ports we created in this run."},{"line_number":696,"context_line":"        # In the initial allocation case, this will be everything we created,"},{"line_number":697,"context_line":"        # and in later runs will only be what was created that time. Thus,"}],"source_content_type":"text/x-python","patch_set":7,"id":"5aef4532_34d09353","line":694,"in_reply_to":"5aef4532_891a3c6d","updated":"2016-03-07 14:58:27.000000000","message":"I guess we can leave other cases for now.  Let\u0027s see what how this is going to get resolve in neutron first.\n\nFYI on migration, the ports are not created, but rather they are updated.  See _update_port_binding_for_instance().","commit_id":"e28175729be10c5ce64422e74651f4473ba5a9a1"},{"author":{"_account_id":9708,"name":"Balazs Gibizer","display_name":"gibi","email":"gibizer@gmail.com","username":"gibi"},"change_message_id":"e56145a96063e873dbbec98b3ab3cef66c010c83","unresolved":false,"context_lines":[{"line_number":691,"context_line":"            LOG.error(msg)"},{"line_number":692,"context_line":"            raise exception.BuildAbortException(instance_uuid\u003dinstance.uuid,"},{"line_number":693,"context_line":"                                                reason\u003dmsg)"},{"line_number":694,"context_line":""},{"line_number":695,"context_line":"        # NOTE(danms): Only return info about ports we created in this run."},{"line_number":696,"context_line":"        # In the initial allocation case, this will be everything we created,"},{"line_number":697,"context_line":"        # and in later runs will only be what was created that time. Thus,"}],"source_content_type":"text/x-python","patch_set":7,"id":"5aef4532_891a3c6d","line":694,"in_reply_to":"7af24918_e9e5ccdb","updated":"2016-03-07 10:25:48.000000000","message":"Could you be more specify where and how do you imagine checking this in the get_instance_nw_info?\n\nDoes nova release the neutron port (set the device_owner to empty) during migration? If not then this race cannot happen during migration.","commit_id":"e28175729be10c5ce64422e74651f4473ba5a9a1"},{"author":{"_account_id":6167,"name":"Ken\u0027ichi Ohmichi","email":"ken1ohmichi@gmail.com","username":"oomichi"},"change_message_id":"6d9f03874995ce85b1a59bd0ba86731b783c50eb","unresolved":false,"context_lines":[{"line_number":688,"context_line":""},{"line_number":689,"context_line":"            # NOTE(gibi): cannot raise PortInUse exception as that will cause"},{"line_number":690,"context_line":"            # unnecessary reschedule"},{"line_number":691,"context_line":"            msg \u003d _(\u0027Following ports are requested \u0027"},{"line_number":692,"context_line":"                    \u0027but already in use. 
%s\u0027) % str(list(lost_ports))"},{"line_number":693,"context_line":"            LOG.error(msg)"},{"line_number":694,"context_line":"            raise exception.BuildAbortException(instance_uuid\u003dinstance.uuid,"}],"source_content_type":"text/x-python","patch_set":8,"id":"5aef4532_ea1ab26d","line":691,"range":{"start_line":691,"start_character":18,"end_line":691,"end_character":20},"updated":"2016-03-07 19:18:18.000000000","message":"I guess here should be \"_LE(\" because this msg is passed to LOG.error().","commit_id":"2aa4289e8d46e2454a6d9286f5dd601e267174a4"},{"author":{"_account_id":9708,"name":"Balazs Gibizer","display_name":"gibi","email":"gibizer@gmail.com","username":"gibi"},"change_message_id":"0c36cd104948931a5c26c801983229ac81ac364d","unresolved":false,"context_lines":[{"line_number":688,"context_line":""},{"line_number":689,"context_line":"            # NOTE(gibi): cannot raise PortInUse exception as that will cause"},{"line_number":690,"context_line":"            # unnecessary reschedule"},{"line_number":691,"context_line":"            msg \u003d _(\u0027Following ports are requested \u0027"},{"line_number":692,"context_line":"                    \u0027but already in use. %s\u0027) % str(list(lost_ports))"},{"line_number":693,"context_line":"            LOG.error(msg)"},{"line_number":694,"context_line":"            raise exception.BuildAbortException(instance_uuid\u003dinstance.uuid,"}],"source_content_type":"text/x-python","patch_set":8,"id":"5aef4532_a2f3819d","line":691,"range":{"start_line":691,"start_character":18,"end_line":691,"end_character":20},"in_reply_to":"5aef4532_ea1ab26d","updated":"2016-03-08 16:44:00.000000000","message":"Yes, it is passed to LOG.error() but also passed to the reason of the BuildAbortException. However I think it is safe to put _LE in both cases.","commit_id":"2aa4289e8d46e2454a6d9286f5dd601e267174a4"},{"author":{"_account_id":6873,"name":"Matt Riedemann","email":"mriedem.os@gmail.com","username":"mriedem"},"change_message_id":"2d522db824aecd37bb1654457bbd5fdb841b9e39","unresolved":false,"context_lines":[{"line_number":686,"context_line":"                               neutron, port_client)"},{"line_number":687,"context_line":"            self._delete_ports(neutron, instance, created_port_ids)"},{"line_number":688,"context_line":""},{"line_number":689,"context_line":"            # NOTE(gibi): cannot raise PortInUse exception as that will cause"},{"line_number":690,"context_line":"            # unnecessary reschedule"},{"line_number":691,"context_line":"            msg \u003d _LE(\u0027Following ports are requested \u0027"},{"line_number":692,"context_line":"                      \u0027but already in use. %s\u0027) % str(list(lost_ports))"}],"source_content_type":"text/x-python","patch_set":9,"id":"5aef4532_8dec9f90","line":689,"updated":"2016-03-09 02:19:13.000000000","message":"Where is PortInUse going to trigger a reschedule? 
It looks to me like it would trigger a BuildAbortException here:\n\nhttps://github.com/openstack/nova/blob/master/nova/compute/manager.py#L2144","commit_id":"d3042511c7e34d0c110e13b81f2678a8026f5ef9"},{"author":{"_account_id":9708,"name":"Balazs Gibizer","display_name":"gibi","email":"gibizer@gmail.com","username":"gibi"},"change_message_id":"da937b9616c6ed4604e782fc53686cedb5bb82d3","unresolved":false,"context_lines":[{"line_number":686,"context_line":"                               neutron, port_client)"},{"line_number":687,"context_line":"            self._delete_ports(neutron, instance, created_port_ids)"},{"line_number":688,"context_line":""},{"line_number":689,"context_line":"            # NOTE(gibi): cannot raise PortInUse exception as that will cause"},{"line_number":690,"context_line":"            # unnecessary reschedule"},{"line_number":691,"context_line":"            msg \u003d _LE(\u0027Following ports are requested \u0027"},{"line_number":692,"context_line":"                      \u0027but already in use. %s\u0027) % str(list(lost_ports))"}],"source_content_type":"text/x-python","patch_set":9,"id":"1af94dfe_69f33e74","line":689,"in_reply_to":"5aef4532_8dec9f90","updated":"2016-03-16 15:59:42.000000000","message":"As far as I understand in  the exception block here https://github.com/opentack/nova/blob/master/nova/compute/manager.py#L2068-L2116 the PortInUse only matches to the except Exception part, which cause a rechedule.","commit_id":"d3042511c7e34d0c110e13b81f2678a8026f5ef9"},{"author":{"_account_id":6873,"name":"Matt Riedemann","email":"mriedem.os@gmail.com","username":"mriedem"},"change_message_id":"42e523acd06973981a290ab7f708a4a5bc71e492","unresolved":false,"context_lines":[{"line_number":691,"context_line":"            msg \u003d _LE(\u0027Following ports are requested \u0027"},{"line_number":692,"context_line":"                      \u0027but already in use. %s\u0027) % str(list(lost_ports))"},{"line_number":693,"context_line":"            LOG.error(msg)"},{"line_number":694,"context_line":"            raise exception.BuildAbortException(instance_uuid\u003dinstance.uuid,"},{"line_number":695,"context_line":"                                                reason\u003dmsg)"},{"line_number":696,"context_line":""},{"line_number":697,"context_line":"        # NOTE(danms): Only return info about ports we created in this run."}],"source_content_type":"text/x-python","patch_set":9,"id":"5aef4532_0d9c0f68","line":694,"updated":"2016-03-09 02:07:32.000000000","message":"allocate_for_instance is also called when attaching interfaces (from allocate_port_for_instance), so this seems inappropriate to raise a BuildAbortException from the network API...","commit_id":"d3042511c7e34d0c110e13b81f2678a8026f5ef9"},{"author":{"_account_id":6873,"name":"Matt Riedemann","email":"mriedem.os@gmail.com","username":"mriedem"},"change_message_id":"2d522db824aecd37bb1654457bbd5fdb841b9e39","unresolved":false,"context_lines":[{"line_number":691,"context_line":"            msg \u003d _LE(\u0027Following ports are requested \u0027"},{"line_number":692,"context_line":"                      \u0027but already in use. 
%s\u0027) % str(list(lost_ports))"},{"line_number":693,"context_line":"            LOG.error(msg)"},{"line_number":694,"context_line":"            raise exception.BuildAbortException(instance_uuid\u003dinstance.uuid,"},{"line_number":695,"context_line":"                                                reason\u003dmsg)"},{"line_number":696,"context_line":""},{"line_number":697,"context_line":"        # NOTE(danms): Only return info about ports we created in this run."}],"source_content_type":"text/x-python","patch_set":9,"id":"5aef4532_6def9387","line":694,"in_reply_to":"5aef4532_0d9c0f68","updated":"2016-03-09 02:19:13.000000000","message":"Any other exception raised out of here will trigger a BuildAbortException in the manager:\n\nhttps://github.com/openstack/nova/blob/master/nova/compute/manager.py#L2144-L2151\n\nSo I think you should raise something more specific to the failure here, that could just be a NovaException with the msg you have already.","commit_id":"d3042511c7e34d0c110e13b81f2678a8026f5ef9"},{"author":{"_account_id":9708,"name":"Balazs Gibizer","display_name":"gibi","email":"gibizer@gmail.com","username":"gibi"},"change_message_id":"da937b9616c6ed4604e782fc53686cedb5bb82d3","unresolved":false,"context_lines":[{"line_number":691,"context_line":"            msg \u003d _LE(\u0027Following ports are requested \u0027"},{"line_number":692,"context_line":"                      \u0027but already in use. %s\u0027) % str(list(lost_ports))"},{"line_number":693,"context_line":"            LOG.error(msg)"},{"line_number":694,"context_line":"            raise exception.BuildAbortException(instance_uuid\u003dinstance.uuid,"},{"line_number":695,"context_line":"                                                reason\u003dmsg)"},{"line_number":696,"context_line":""},{"line_number":697,"context_line":"        # NOTE(danms): Only return info about ports we created in this run."}],"source_content_type":"text/x-python","patch_set":9,"id":"1af94dfe_894a0a82","line":694,"in_reply_to":"5aef4532_6def9387","updated":"2016-03-16 15:59:42.000000000","message":"OK, lets raise a more specific exception here and handle that properly in https://github.com/openstack/nova/blob/master/nova/compute/manager.py#L2068-L2116 that will solve both your comment and my problem. :)","commit_id":"d3042511c7e34d0c110e13b81f2678a8026f5ef9"},{"author":{"_account_id":6167,"name":"Ken\u0027ichi Ohmichi","email":"ken1ohmichi@gmail.com","username":"oomichi"},"change_message_id":"9b2818d60e27bf69ce2aa8be9ef5b956c3add75a","unresolved":false,"context_lines":[{"line_number":690,"context_line":"                               neutron, port_client)"},{"line_number":691,"context_line":"            self._delete_ports(neutron, instance, created_port_ids)"},{"line_number":692,"context_line":""},{"line_number":693,"context_line":"            # NOTE(gibi): cannot raise PortInUse exception as that will cause"},{"line_number":694,"context_line":"            # unnecessary reschedule"},{"line_number":695,"context_line":"            msg \u003d _LE(\u0027Following ports are requested \u0027"},{"line_number":696,"context_line":"                      \u0027but already in use. 
%s\u0027) % str(list(lost_ports))"},{"line_number":697,"context_line":"            LOG.error(msg)"},{"line_number":698,"context_line":"            raise exception.PortConflict(instance_uuid\u003dinstance.uuid,"},{"line_number":699,"context_line":"                                         ports\u003dstr(list(lost_ports)))"},{"line_number":700,"context_line":""}],"source_content_type":"text/x-python","patch_set":11,"id":"7aa08908_ccfe6404","line":697,"range":{"start_line":693,"start_character":0,"end_line":697,"end_character":26},"updated":"2016-06-18 01:11:54.000000000","message":"Do deployers/operators need to know this case every times?\nThis case is returned to users as BadRequest response with the detail reason and they can know that.","commit_id":"994248bf10b197f5d97ea7363208d4ddc241a575"},{"author":{"_account_id":9708,"name":"Balazs Gibizer","display_name":"gibi","email":"gibizer@gmail.com","username":"gibi"},"change_message_id":"379b57288f8cc31b65cd71d813fb3cbaf1594e53","unresolved":false,"context_lines":[{"line_number":690,"context_line":"                               neutron, port_client)"},{"line_number":691,"context_line":"            self._delete_ports(neutron, instance, created_port_ids)"},{"line_number":692,"context_line":""},{"line_number":693,"context_line":"            # NOTE(gibi): cannot raise PortInUse exception as that will cause"},{"line_number":694,"context_line":"            # unnecessary reschedule"},{"line_number":695,"context_line":"            msg \u003d _LE(\u0027Following ports are requested \u0027"},{"line_number":696,"context_line":"                      \u0027but already in use. %s\u0027) % str(list(lost_ports))"},{"line_number":697,"context_line":"            LOG.error(msg)"},{"line_number":698,"context_line":"            raise exception.PortConflict(instance_uuid\u003dinstance.uuid,"},{"line_number":699,"context_line":"                                         ports\u003dstr(list(lost_ports)))"},{"line_number":700,"context_line":""}],"source_content_type":"text/x-python","patch_set":11,"id":"dada55a8_81d1dc59","line":697,"range":{"start_line":693,"start_character":0,"end_line":697,"end_character":26},"in_reply_to":"7aa08908_ccfe6404","updated":"2016-07-26 16:38:12.000000000","message":"Do you suggest to move it to debug level? As the BadRequest is anyhow logged I agree that this can be a debug level log.","commit_id":"994248bf10b197f5d97ea7363208d4ddc241a575"},{"author":{"_account_id":6167,"name":"Ken\u0027ichi Ohmichi","email":"ken1ohmichi@gmail.com","username":"oomichi"},"change_message_id":"713065055d2903a03b41e88d1470dbb076c3910d","unresolved":false,"context_lines":[{"line_number":809,"context_line":"        # neutron. So if there was a parallel port.update from another"},{"line_number":810,"context_line":"        # nova-compute then the returned nw_info does not have all the ports"},{"line_number":811,"context_line":"        # that the original request. 
So we can detect the race here."},{"line_number":812,"context_line":"        # FIXME(gibi): Remove this double check when the bug #1500365 is fixed"},{"line_number":813,"context_line":"        # in Neutron"},{"line_number":814,"context_line":"        owned_ports \u003d [vif[\u0027id\u0027] for vif in nw_info]"},{"line_number":815,"context_line":"        lost_ports \u003d set(ordered_ports) - set(owned_ports)"},{"line_number":816,"context_line":"        if lost_ports:"}],"source_content_type":"text/x-python","patch_set":15,"id":"1ac06dbe_0bfb537a","line":813,"range":{"start_line":812,"start_character":8,"end_line":813,"end_character":20},"updated":"2016-08-23 17:05:50.000000000","message":"Now the bug is marked as \"Won\u0027t Fix\" on Neutron side..","commit_id":"b1aee7f06b1716d41408c885e06a7fecc8c26948"},{"author":{"_account_id":9708,"name":"Balazs Gibizer","display_name":"gibi","email":"gibizer@gmail.com","username":"gibi"},"change_message_id":"3280fd999175952a0dac1cd76d408cf22e1bfbb1","unresolved":false,"context_lines":[{"line_number":809,"context_line":"        # neutron. So if there was a parallel port.update from another"},{"line_number":810,"context_line":"        # nova-compute then the returned nw_info does not have all the ports"},{"line_number":811,"context_line":"        # that the original request. So we can detect the race here."},{"line_number":812,"context_line":"        # FIXME(gibi): Remove this double check when the bug #1500365 is fixed"},{"line_number":813,"context_line":"        # in Neutron"},{"line_number":814,"context_line":"        owned_ports \u003d [vif[\u0027id\u0027] for vif in nw_info]"},{"line_number":815,"context_line":"        lost_ports \u003d set(ordered_ports) - set(owned_ports)"},{"line_number":816,"context_line":"        if lost_ports:"}],"source_content_type":"text/x-python","patch_set":15,"id":"1ac06dbe_49f8454a","line":813,"range":{"start_line":812,"start_character":8,"end_line":813,"end_character":20},"in_reply_to":"1ac06dbe_0bfb537a","updated":"2016-08-24 12:58:04.000000000","message":"Done","commit_id":"b1aee7f06b1716d41408c885e06a7fecc8c26948"},{"author":{"_account_id":6167,"name":"Ken\u0027ichi Ohmichi","email":"ken1ohmichi@gmail.com","username":"oomichi"},"change_message_id":"713065055d2903a03b41e88d1470dbb076c3910d","unresolved":false,"context_lines":[{"line_number":821,"context_line":""},{"line_number":822,"context_line":"            # NOTE(gibi): cannot raise PortInUse exception as that will cause"},{"line_number":823,"context_line":"            # unnecessary reschedule"},{"line_number":824,"context_line":"            msg \u003d _LE(\u0027Following ports are requested \u0027"},{"line_number":825,"context_line":"                      \u0027but already in use. %s\u0027) % str(list(lost_ports))"},{"line_number":826,"context_line":"            LOG.debug(msg)"},{"line_number":827,"context_line":"            raise exception.PortConflict(instance_uuid\u003dinstance.uuid,"}],"source_content_type":"text/x-python","patch_set":15,"id":"1ac06dbe_4bc70b64","line":824,"range":{"start_line":824,"start_character":18,"end_line":824,"end_character":22},"updated":"2016-08-23 17:05:50.000000000","message":"This message is not passed as LOG.error. 
So _LE() is not valid.","commit_id":"b1aee7f06b1716d41408c885e06a7fecc8c26948"},{"author":{"_account_id":9708,"name":"Balazs Gibizer","display_name":"gibi","email":"gibizer@gmail.com","username":"gibi"},"change_message_id":"3280fd999175952a0dac1cd76d408cf22e1bfbb1","unresolved":false,"context_lines":[{"line_number":821,"context_line":""},{"line_number":822,"context_line":"            # NOTE(gibi): cannot raise PortInUse exception as that will cause"},{"line_number":823,"context_line":"            # unnecessary reschedule"},{"line_number":824,"context_line":"            msg \u003d _LE(\u0027Following ports are requested \u0027"},{"line_number":825,"context_line":"                      \u0027but already in use. %s\u0027) % str(list(lost_ports))"},{"line_number":826,"context_line":"            LOG.debug(msg)"},{"line_number":827,"context_line":"            raise exception.PortConflict(instance_uuid\u003dinstance.uuid,"}],"source_content_type":"text/x-python","patch_set":15,"id":"1ac06dbe_0973ede8","line":824,"range":{"start_line":824,"start_character":18,"end_line":824,"end_character":22},"in_reply_to":"1ac06dbe_4bc70b64","updated":"2016-08-24 12:58:04.000000000","message":"Done","commit_id":"b1aee7f06b1716d41408c885e06a7fecc8c26948"},{"author":{"_account_id":782,"name":"John Garbutt","email":"john@johngarbutt.com","username":"johngarbutt"},"change_message_id":"986af5e0cea3f2a3305c61e13f5382f2bd55fd6d","unresolved":false,"context_lines":[{"line_number":808,"context_line":"        # NOTE(gibi): get_instance_nw_info will re-query the port list from"},{"line_number":809,"context_line":"        # neutron. So if there was a parallel port.update from another"},{"line_number":810,"context_line":"        # nova-compute then the returned nw_info does not have all the ports"},{"line_number":811,"context_line":"        # that the original request. 
So we can detect the race here."},{"line_number":812,"context_line":"        owned_ports \u003d [vif[\u0027id\u0027] for vif in nw_info]"},{"line_number":813,"context_line":"        lost_ports \u003d set(ordered_ports) - set(owned_ports)"},{"line_number":814,"context_line":"        if lost_ports:"}],"source_content_type":"text/x-python","patch_set":16,"id":"5a74a57a_a3e1ab9c","line":811,"updated":"2016-11-24 15:45:49.000000000","message":"I like how we don\u0027t add an extra call to neutron, this seems a good way of detecting the race.","commit_id":"27bcb643337913d2aab3c58f1f1a9645031bff20"},{"author":{"_account_id":782,"name":"John Garbutt","email":"john@johngarbutt.com","username":"johngarbutt"},"change_message_id":"986af5e0cea3f2a3305c61e13f5382f2bd55fd6d","unresolved":false,"context_lines":[{"line_number":812,"context_line":"        owned_ports \u003d [vif[\u0027id\u0027] for vif in nw_info]"},{"line_number":813,"context_line":"        lost_ports \u003d set(ordered_ports) - set(owned_ports)"},{"line_number":814,"context_line":"        if lost_ports:"},{"line_number":815,"context_line":"            self._unbind_ports(context,"},{"line_number":816,"context_line":"                               set(preexisting_port_ids) - set(lost_ports),"},{"line_number":817,"context_line":"                               neutron, neutron)"},{"line_number":818,"context_line":"            self._delete_ports(neutron, instance, created_port_ids)"},{"line_number":819,"context_line":""},{"line_number":820,"context_line":"            # NOTE(gibi): cannot raise PortInUse exception as that will cause"},{"line_number":821,"context_line":"            # unnecessary reschedule"}],"source_content_type":"text/x-python","patch_set":16,"id":"5a74a57a_a3736b14","line":818,"range":{"start_line":815,"start_character":12,"end_line":818,"end_character":67},"updated":"2016-11-24 15:45:49.000000000","message":"Part of me wants to call deallocate_for_instance here, would that not be more correct? Although I guess we maybe call that elsewhere in the error handing? Not sure.","commit_id":"27bcb643337913d2aab3c58f1f1a9645031bff20"},{"author":{"_account_id":6062,"name":"jichenjc","email":"jichenjc@cn.ibm.com","username":"jichenjc"},"change_message_id":"fe4e2f309beeda77bbe36c57119789dbeedb1b27","unresolved":false,"context_lines":[{"line_number":818,"context_line":"            self._delete_ports(neutron, instance, created_port_ids)"},{"line_number":819,"context_line":""},{"line_number":820,"context_line":"            # NOTE(gibi): cannot raise PortInUse exception as that will cause"},{"line_number":821,"context_line":"            # unnecessary reschedule"},{"line_number":822,"context_line":"            LOG.debug(\u0027Following ports are requested but already in use. 
%s\u0027"},{"line_number":823,"context_line":"                      % str(list(lost_ports)))"},{"line_number":824,"context_line":"            raise exception.PortsConflict(instance_uuid\u003dinstance.uuid,"}],"source_content_type":"text/x-python","patch_set":16,"id":"fa7ab95a_4d5f6175","line":821,"range":{"start_line":821,"start_character":26,"end_line":821,"end_character":36},"updated":"2016-08-31 12:54:46.000000000","message":"just curious, actually a race condition deserve a retry\nis it because we will keep network info during reschedule?","commit_id":"27bcb643337913d2aab3c58f1f1a9645031bff20"},{"author":{"_account_id":782,"name":"John Garbutt","email":"john@johngarbutt.com","username":"johngarbutt"},"change_message_id":"986af5e0cea3f2a3305c61e13f5382f2bd55fd6d","unresolved":false,"context_lines":[{"line_number":818,"context_line":"            self._delete_ports(neutron, instance, created_port_ids)"},{"line_number":819,"context_line":""},{"line_number":820,"context_line":"            # NOTE(gibi): cannot raise PortInUse exception as that will cause"},{"line_number":821,"context_line":"            # unnecessary reschedule"},{"line_number":822,"context_line":"            LOG.debug(\u0027Following ports are requested but already in use. %s\u0027"},{"line_number":823,"context_line":"                      % str(list(lost_ports)))"},{"line_number":824,"context_line":"            raise exception.PortsConflict(instance_uuid\u003dinstance.uuid,"}],"source_content_type":"text/x-python","patch_set":16,"id":"5a74a57a_e3a483ba","line":821,"range":{"start_line":821,"start_character":26,"end_line":821,"end_character":36},"in_reply_to":"fa7ab95a_4d5f6175","updated":"2016-11-24 15:45:49.000000000","message":"the race only occurs with ports not created by Nova, so no retry would actually succeed here. 
This is correct.\n\nI would argue we should just ensure that PortInUse never triggers a reschedule anyways.","commit_id":"27bcb643337913d2aab3c58f1f1a9645031bff20"},{"author":{"_account_id":6062,"name":"jichenjc","email":"jichenjc@cn.ibm.com","username":"jichenjc"},"change_message_id":"fe4e2f309beeda77bbe36c57119789dbeedb1b27","unresolved":false,"context_lines":[{"line_number":1088,"context_line":"        LOG.debug(\u0027deallocate_for_instance()\u0027, instance\u003dinstance)"},{"line_number":1089,"context_line":"        search_opts \u003d {\u0027device_id\u0027: instance.uuid}"},{"line_number":1090,"context_line":"        neutron \u003d get_client(context)"},{"line_number":1091,"context_line":"        data \u003d neutron.list_ports(**search_opts)"},{"line_number":1092,"context_line":"        ports \u003d [port[\u0027id\u0027] for port in data.get(\u0027ports\u0027, [])]"},{"line_number":1093,"context_line":""},{"line_number":1094,"context_line":"        requested_networks \u003d kwargs.get(\u0027requested_networks\u0027) or []"}],"source_content_type":"text/x-python","patch_set":16,"id":"fa7ab95a_b824adaf","line":1091,"range":{"start_line":1091,"start_character":23,"end_line":1091,"end_character":33},"updated":"2016-08-31 12:54:46.000000000","message":"not sure but can we search only ports belong to this instance?","commit_id":"27bcb643337913d2aab3c58f1f1a9645031bff20"},{"author":{"_account_id":782,"name":"John Garbutt","email":"john@johngarbutt.com","username":"johngarbutt"},"change_message_id":"986af5e0cea3f2a3305c61e13f5382f2bd55fd6d","unresolved":false,"context_lines":[{"line_number":1099,"context_line":"                             in requested_networks])"},{"line_number":1100,"context_line":"        # NOTE(gibi): do not try to unbind ports that are not owned"},{"line_number":1101,"context_line":"        # by the instance"},{"line_number":1102,"context_line":"        ports_to_skip \u0026\u003d set(ports)"},{"line_number":1103,"context_line":"        # NOTE(boden): requested_networks only passed in when deallocating"},{"line_number":1104,"context_line":"        # from a failed build / spawn call. Therefore we need to include"},{"line_number":1105,"context_line":"        # preexisting ports when deallocating from a standard delete op"}],"source_content_type":"text/x-python","patch_set":16,"id":"5a74a57a_e363a33f","line":1102,"updated":"2016-11-24 15:45:49.000000000","message":"I don\u0027t see a test for this one.","commit_id":"27bcb643337913d2aab3c58f1f1a9645031bff20"},{"author":{"_account_id":782,"name":"John Garbutt","email":"john@johngarbutt.com","username":"johngarbutt"},"change_message_id":"986af5e0cea3f2a3305c61e13f5382f2bd55fd6d","unresolved":false,"context_lines":[{"line_number":1487,"context_line":"                    port \u003d self._show_port(context, request.port_id,"},{"line_number":1488,"context_line":"                                           neutron_client\u003dneutron)"},{"line_number":1489,"context_line":"                    if port.get(\u0027device_id\u0027, None):"},{"line_number":1490,"context_line":"                        raise exception.PortInUse(port_id\u003drequest.port_id)"},{"line_number":1491,"context_line":"                    deferred_ip \u003d port.get(\u0027ip_allocation\u0027) \u003d\u003d \u0027deferred\u0027"},{"line_number":1492,"context_line":"                    # NOTE(carl_baldwin) A deferred IP port doesn\u0027t have an"},{"line_number":1493,"context_line":"                    # address here. 
If it fails to get one later when nova"}],"source_content_type":"text/x-python","patch_set":16,"id":"5a74a57a_437d371b","line":1490,"updated":"2016-11-24 15:45:49.000000000","message":"Ah, yeah, should really be doing port in use to match here I guess.","commit_id":"27bcb643337913d2aab3c58f1f1a9645031bff20"}]}
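A note for readers following this thread: the detection scheme the review converges on is visible in the quoted context lines above (owned_ports rebuilt from the re-queried nw_info, lost_ports as a set difference, then PortsConflict raised instead of PortInUse). Below is a minimal, self-contained Python sketch of that scheme; the trimmed class bodies, the detect_lost_ports helper, and the sample data are illustrative assumptions for this note, not nova's actual code, which lives in nova/exception.py and nova/network/neutronv2/api.py as quoted.

    # Sketch only: simplified stand-ins for nova's exception hierarchy.
    class NovaException(Exception):
        msg_fmt = 'An unknown exception occurred.'

        def __init__(self, **kwargs):
            # Same dict-style interpolation that nova's msg_fmt strings use.
            super(NovaException, self).__init__(self.msg_fmt % kwargs)


    class PortsConflict(NovaException):
        # Raised instead of PortInUse so the compute manager aborts the build
        # rather than rescheduling it: a retry cannot succeed once another
        # instance owns the port.
        msg_fmt = ('The ports %(ports)s are requested for the instance '
                   '%(instance_uuid)s but already in use')


    def detect_lost_ports(ordered_ports, nw_info):
        # nw_info stands in for what get_instance_nw_info() re-queries from
        # neutron; any requested port missing from it was claimed by a
        # parallel allocate_for_instance that updated device_id first.
        owned_ports = [vif['id'] for vif in nw_info]
        return set(ordered_ports) - set(owned_ports)


    # Hypothetical usage: 'port-b' was grabbed by a concurrent build.
    nw_info = [{'id': 'port-a'}]
    lost_ports = detect_lost_ports(['port-a', 'port-b'], nw_info)
    if lost_ports:
        try:
            raise PortsConflict(instance_uuid='fake-uuid',
                                ports=str(sorted(lost_ports)))
        except PortsConflict as exc:
            print(exc)  # the real manager logs this and aborts the build

The design point John Garbutt highlights holds in this sketch too: no extra neutron round trip is needed, because the port list that get_instance_nw_info already re-fetches doubles as the ownership check.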
